Dec 01 20:04:39 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 01 20:04:39 crc restorecon[4695]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 20:04:39 crc restorecon[4695]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc 
restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 20:04:39 crc 
restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 
20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 20:04:39 crc 
restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 
20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 
20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 20:04:39 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:39 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:40 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:40 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:40 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:40 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:40 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:40 crc restorecon[4695]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 20:04:40 crc restorecon[4695]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 01 20:04:40 crc kubenswrapper[4852]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 01 20:04:40 crc kubenswrapper[4852]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 01 20:04:40 crc kubenswrapper[4852]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 01 20:04:40 crc kubenswrapper[4852]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 01 20:04:40 crc kubenswrapper[4852]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 01 20:04:40 crc kubenswrapper[4852]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.166781 4852 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171007 4852 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171034 4852 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171039 4852 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171043 4852 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171048 4852 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171053 4852 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171057 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171061 4852 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171065 4852 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171069 4852 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171073 4852 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171077 4852 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171081 4852 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171085 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171089 4852 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171092 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171096 4852 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171100 4852 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171104 4852 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171107 4852 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171111 4852 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171115 4852 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171118 4852 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171121 4852 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171125 4852 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171129 4852 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171132 4852 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171135 4852 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171139 4852 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171144 4852 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171151 4852 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171155 4852 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171159 4852 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171163 4852 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171172 4852 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171177 4852 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171181 4852 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171185 4852 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171188 4852 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171192 4852 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171196 4852 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171200 4852 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171203 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171207 4852 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171211 4852 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171214 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171220 4852 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171224 4852 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171228 4852 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171232 4852 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171235 4852 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171239 4852 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171244 4852 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171248 4852 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171252 4852 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171255 4852 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171259 4852 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171263 4852 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171266 4852 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171269 4852 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 
20:04:40.171273 4852 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171277 4852 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171280 4852 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171283 4852 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171288 4852 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171291 4852 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171295 4852 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171301 4852 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171306 4852 feature_gate.go:330] unrecognized feature gate: Example Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171310 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.171314 4852 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171665 4852 flags.go:64] FLAG: --address="0.0.0.0" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171682 4852 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171695 4852 flags.go:64] FLAG: --anonymous-auth="true" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171701 4852 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171708 4852 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171713 4852 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171720 4852 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171726 4852 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171732 4852 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171736 4852 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171741 4852 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171746 4852 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171751 4852 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171755 4852 flags.go:64] FLAG: --cgroup-root="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171759 4852 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171763 4852 flags.go:64] FLAG: --client-ca-file="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171767 4852 flags.go:64] FLAG: --cloud-config="" 
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171772 4852 flags.go:64] FLAG: --cloud-provider=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171776 4852 flags.go:64] FLAG: --cluster-dns="[]"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171782 4852 flags.go:64] FLAG: --cluster-domain=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171786 4852 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171791 4852 flags.go:64] FLAG: --config-dir=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171795 4852 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171800 4852 flags.go:64] FLAG: --container-log-max-files="5"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171806 4852 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171810 4852 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171814 4852 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171819 4852 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171824 4852 flags.go:64] FLAG: --contention-profiling="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171828 4852 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171832 4852 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171837 4852 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171841 4852 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171848 4852 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171852 4852 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171857 4852 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171861 4852 flags.go:64] FLAG: --enable-load-reader="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171865 4852 flags.go:64] FLAG: --enable-server="true"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171869 4852 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171876 4852 flags.go:64] FLAG: --event-burst="100"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171881 4852 flags.go:64] FLAG: --event-qps="50"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171885 4852 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171890 4852 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171894 4852 flags.go:64] FLAG: --eviction-hard=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171900 4852 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171904 4852 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171908 4852 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171915 4852 flags.go:64] FLAG: --eviction-soft=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171919 4852 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171923 4852 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171927 4852 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171932 4852 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171936 4852 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171940 4852 flags.go:64] FLAG: --fail-swap-on="true"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171944 4852 flags.go:64] FLAG: --feature-gates=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171950 4852 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171954 4852 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171959 4852 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171963 4852 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171967 4852 flags.go:64] FLAG: --healthz-port="10248"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171971 4852 flags.go:64] FLAG: --help="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171976 4852 flags.go:64] FLAG: --hostname-override=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171980 4852 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171984 4852 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171988 4852 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171992 4852 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.171996 4852 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172001 4852 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172005 4852 flags.go:64] FLAG: --image-service-endpoint=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172009 4852 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172013 4852 flags.go:64] FLAG: --kube-api-burst="100"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172017 4852 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172022 4852 flags.go:64] FLAG: --kube-api-qps="50"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172026 4852 flags.go:64] FLAG: --kube-reserved=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172030 4852 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172034 4852 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172038 4852 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172042 4852 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172046 4852 flags.go:64] FLAG: --lock-file=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172050 4852 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172054 4852 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172059 4852 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172066 4852 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172071 4852 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172075 4852 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172079 4852 flags.go:64] FLAG: --logging-format="text"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172083 4852 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172089 4852 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172093 4852 flags.go:64] FLAG: --manifest-url=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172097 4852 flags.go:64] FLAG: --manifest-url-header=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172104 4852 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172108 4852 flags.go:64] FLAG: --max-open-files="1000000"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172114 4852 flags.go:64] FLAG: --max-pods="110"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172118 4852 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172122 4852 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172127 4852 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172132 4852 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172136 4852 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172140 4852 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172145 4852 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172158 4852 flags.go:64] FLAG: --node-status-max-images="50"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172163 4852 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172167 4852 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172171 4852 flags.go:64] FLAG: --pod-cidr=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172176 4852 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172183 4852 flags.go:64] FLAG: --pod-manifest-path=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172187 4852 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172191 4852 flags.go:64] FLAG: --pods-per-core="0"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172195 4852 flags.go:64] FLAG: --port="10250"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172199 4852 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172203 4852 flags.go:64] FLAG: --provider-id=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172207 4852 flags.go:64] FLAG: --qos-reserved=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172212 4852 flags.go:64] FLAG: --read-only-port="10255"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172216 4852 flags.go:64] FLAG: --register-node="true"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172220 4852 flags.go:64] FLAG: --register-schedulable="true"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172224 4852 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172232 4852 flags.go:64] FLAG: --registry-burst="10"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172236 4852 flags.go:64] FLAG: --registry-qps="5"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172240 4852 flags.go:64] FLAG: --reserved-cpus=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172246 4852 flags.go:64] FLAG: --reserved-memory=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172256 4852 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172261 4852 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172265 4852 flags.go:64] FLAG: --rotate-certificates="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172269 4852 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172273 4852 flags.go:64] FLAG: --runonce="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172278 4852 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172282 4852 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172286 4852 flags.go:64] FLAG: --seccomp-default="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172290 4852 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172294 4852 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172298 4852 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172303 4852 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172307 4852 flags.go:64] FLAG: --storage-driver-password="root"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172312 4852 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172316 4852 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172320 4852 flags.go:64] FLAG: --storage-driver-user="root"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172324 4852 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172328 4852 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172333 4852 flags.go:64] FLAG: --system-cgroups=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172337 4852 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172344 4852 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172349 4852 flags.go:64] FLAG: --tls-cert-file=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172353 4852 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172359 4852 flags.go:64] FLAG: --tls-min-version=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172363 4852 flags.go:64] FLAG: --tls-private-key-file=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172367 4852 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172371 4852 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172376 4852 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172380 4852 flags.go:64] FLAG: --v="2"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172386 4852 flags.go:64] FLAG: --version="false"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172392 4852 flags.go:64] FLAG: --vmodule=""
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172397 4852 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172402 4852 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172542 4852 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172547 4852 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172553 4852 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172558 4852 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172562 4852 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172566 4852 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172569 4852 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172573 4852 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172577 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172580 4852 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172584 4852 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172587 4852 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172592 4852 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172597 4852 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172601 4852 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172605 4852 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172613 4852 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172616 4852 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172620 4852 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172623 4852 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172627 4852 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172630 4852 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172634 4852 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172638 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172643 4852 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172647 4852 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172651 4852 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172655 4852 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172660 4852 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172663 4852 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172668 4852 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172672 4852 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172675 4852 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172679 4852 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172682 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172686 4852 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172689 4852 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172693 4852 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172697 4852 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172700 4852 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172704 4852 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172708 4852 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172712 4852 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172716 4852 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172721 4852 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172724 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172729 4852 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172732 4852 feature_gate.go:330] unrecognized feature gate: Example
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172738 4852 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172742 4852 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172745 4852 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172749 4852 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172752 4852 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172756 4852 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172759 4852 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172763 4852 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172766 4852 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172770 4852 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172774 4852 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172777 4852 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172780 4852 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172784 4852 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172788 4852 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172792 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172796 4852 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172799 4852 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172803 4852 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172807 4852 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172810 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172813 4852 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.172817 4852 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.172830 4852 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.182355 4852 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.182377 4852 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182502 4852 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182514 4852 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182520 4852 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182527 4852 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182534 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182540 4852 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182544 4852 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182548 4852 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182552 4852 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182556 4852 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182560 4852 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182563 4852 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182567 4852 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182571 4852 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182575 4852 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182579 4852 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182583 4852 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182587 4852 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182592 4852 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182598 4852 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182603 4852 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182607 4852 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182611 4852 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182614 4852 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182618 4852 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182621 4852 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182625 4852 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182629 4852 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182632 4852 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182636 4852 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182641 4852 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182646 4852 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182650 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182654 4852 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182660 4852 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182663 4852 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182667 4852 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182670 4852 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182674 4852 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182677 4852 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182681 4852 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182684 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182688 4852 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182691 4852 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182695 4852 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182698 4852 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182701 4852 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182705 4852 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182708 4852 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182712 4852 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182715 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182719 4852 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182722 4852 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182726 4852 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182730 4852 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182733 4852 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182736 4852 feature_gate.go:330] unrecognized feature gate: Example
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182740 4852 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182743 4852 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182747 4852 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182750 4852 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182754 4852 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182757 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182763 4852 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182766 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182770 4852 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182773 4852 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182776 4852 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182781 4852 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182785 4852 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182790 4852 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.182796 4852 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182905 4852 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182911 4852 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182915 4852 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182919 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182923 4852 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182926 4852 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182930 4852 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182933 4852 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182937 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182940 4852 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182944 4852 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182947 4852 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182951 4852 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182955 4852 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182958 4852 feature_gate.go:330] unrecognized feature gate: Example
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182962 4852 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182966 4852 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182971 4852 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182976 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182980 4852 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182984 4852 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182987 4852 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182991 4852 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182995 4852 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.182999 4852 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183002 4852 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183006 4852 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183010 4852 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183014 4852 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183019 4852 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183023 4852 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183028 4852 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183032 4852 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183037 4852 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183043 4852 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183049 4852 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183055 4852 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183060 4852 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183064 4852 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183069 4852 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183073 4852 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183077 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183082 4852 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183086 4852 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183091 4852 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183095 4852 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183100 4852 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183104 4852 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183108 4852 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183112 4852 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183117 4852 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183120 4852 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183124 4852 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183128 4852 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183134 4852 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183138 4852 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183141 4852 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183145 4852 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183149 4852 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183153 4852 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183156 4852 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183160 4852 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183164 4852 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183168 4852 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183171 4852 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183175 4852 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183179 4852 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183182 4852 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183186 4852 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183189 4852 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.183194 4852 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.183201 4852 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.183553 4852 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.186110 4852 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.186175 4852 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.186986 4852 server.go:997] "Starting client certificate rotation" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.187005 4852 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.187196 4852 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-06 20:43:06.889126718 +0000 UTC Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.187277 4852 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 120h38m26.701854358s for next certificate rotation Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.192584 4852 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.193961 4852 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.204748 4852 log.go:25] "Validated CRI v1 runtime API" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.229147 4852 log.go:25] "Validated CRI v1 image API" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.232515 4852 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.236332 4852 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-01-20-00-26-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.236362 4852 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}] Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.250069 4852 manager.go:217] Machine: {Timestamp:2025-12-01 20:04:40.248680866 +0000 UTC m=+0.175762293 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:82eb6500-1744-4a7f-824c-21d40af3b228 BootID:7f1d127a-f97f-4747-ace7-8885db8f5b08 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 
Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:9e:a9:1c Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:9e:a9:1c Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:30:cb:3a Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:9f:c4:1b Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:f7:2f:f1 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:2f:32:54 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:4a:57:a8:54:ab:f0 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:8a:70:49:ac:2e:df Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: 
DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.250280 4852 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.250541 4852 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.250807 4852 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.250968 4852 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.251002 4852 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.251175 4852 topology_manager.go:138] "Creating topology manager with none policy" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.251185 4852 container_manager_linux.go:303] "Creating device plugin manager" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.251414 4852 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.251440 4852 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 01 
20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.251815 4852 state_mem.go:36] "Initialized new in-memory state store"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.251899 4852 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.252686 4852 kubelet.go:418] "Attempting to sync node with API server"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.252704 4852 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.252723 4852 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.252734 4852 kubelet.go:324] "Adding apiserver pod source"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.252746 4852 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.254564 4852 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.254917 4852 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.255690 4852 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256185 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256206 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256213 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256219 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256230 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256237 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256244 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256256 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256265 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256273 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256283 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256290 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256523 4852 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.256639 4852 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.219:6443: connect: connection refused
Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.256752 4852 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.219:6443: connect: connection refused" logger="UnhandledError"
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.256742 4852 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.219:6443: connect: connection refused
Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.256868 4852 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.219:6443: connect: connection refused" logger="UnhandledError"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.256959 4852 server.go:1280] "Started kubelet"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.257109 4852 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.219:6443: connect: connection refused
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.257152 4852 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.257094 4852 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.258188 4852 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 01 20:04:40 crc systemd[1]: Started Kubernetes Kubelet.
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.261161 4852 server.go:460] "Adding debug handlers to kubelet server"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.261365 4852 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.261415 4852 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.261627 4852 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 06:54:14.044667829 +0000 UTC
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.261978 4852 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.262000 4852 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.262163 4852 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.265642 4852 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.266092 4852 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.219:6443: connect: connection refused
Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.266228 4852 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.219:6443: connect: connection refused" logger="UnhandledError"
Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.266171 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="200ms"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.266644 4852 factory.go:55] Registering systemd factory
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.266679 4852 factory.go:221] Registration of the systemd container factory successfully
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.267402 4852 factory.go:153] Registering CRI-O factory
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.267421 4852 factory.go:221] Registration of the crio container factory successfully
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.267569 4852 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.267610 4852 factory.go:103] Registering Raw factory
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.267635 4852 manager.go:1196] Started watching for new ooms in manager
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.268827 4852 manager.go:319] Starting recovery of all containers
Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.269581 4852 event.go:368] "Unable to write
event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.219:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d30116472e1ce default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 20:04:40.256930254 +0000 UTC m=+0.184011661,LastTimestamp:2025-12-01 20:04:40.256930254 +0000 UTC m=+0.184011661,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.274928 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275396 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275415 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275430 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275442 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275471 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275484 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275497 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275512 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275525 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275537 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275551 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275563 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275579 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275593 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275614 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275627 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275639 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275681 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275692 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275703 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275717 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275727 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275740 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275754 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275769 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275788 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275801 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275815 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275828 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275842 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275883 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275897 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275910 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275923 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275935 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275948 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275961 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275977 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.275990 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276002 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276013 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276025 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276036 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276045 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276055 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276065 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276075 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276086 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276096 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276104 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276114 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276128 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276137 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276149 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276158 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276199 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276214 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276224 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276233 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276242 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276250 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276259 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276273 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276283 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276296 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276308 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276317 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276326 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276336 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276348 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276361 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276371 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276382 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276391 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" 
volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276399 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276408 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276417 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276426 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276435 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276445 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276500 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276511 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276519 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276528 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276538 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276548 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276557 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276571 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276580 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276589 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276597 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276607 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276616 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276630 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276640 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276650 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276662 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276675 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276686 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276697 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276709 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276722 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276733 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276750 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276765 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276777 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276789 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" 
volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276800 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276811 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276823 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276835 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276901 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276915 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276927 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276938 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276949 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276960 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276977 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.276988 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277613 4852 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277648 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277666 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277681 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277696 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277712 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277724 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277737 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277749 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277759 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277768 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277776 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277785 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277795 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277805 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277813 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277857 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277867 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277876 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277884 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277893 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277902 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277911 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277929 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277939 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277948 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277958 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277967 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277976 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277985 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.277996 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278011 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278024 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278036 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278049 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278059 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278068 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278079 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278087 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278097 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278107 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278115 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278125 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278133 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278142 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278152 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278161 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278169 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278178 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278186 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278194 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278205 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278215 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278224 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278232 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278241 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278250 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278258 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278267 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278275 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278283 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278292 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278302 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278311 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278322 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278331 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278340 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278352 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278361 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278371 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278406 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278416 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278423 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278432 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278441 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278463 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" 
volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278473 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278481 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278491 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278500 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278508 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278517 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278526 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278535 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278545 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278554 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278562 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278572 4852 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278581 4852 reconstruct.go:97] "Volume reconstruction finished" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.278588 4852 reconciler.go:26] "Reconciler: start to sync state" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.308191 4852 manager.go:324] Recovery completed Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.316531 4852 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.318582 4852 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.318639 4852 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.318677 4852 kubelet.go:2335] "Starting kubelet main sync loop" Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.318766 4852 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.319883 4852 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.319967 4852 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.219:6443: connect: connection refused" logger="UnhandledError" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.325899 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.327247 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.327304 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.327322 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.328059 4852 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.328139 4852 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.328199 4852 state_mem.go:36] "Initialized new in-memory state store" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.339664 4852 policy_none.go:49] "None policy: Start" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.340416 4852 memory_manager.go:170] "Starting 
memorymanager" policy="None" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.340475 4852 state_mem.go:35] "Initializing new in-memory state store" Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.366492 4852 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.408416 4852 manager.go:334] "Starting Device Plugin manager" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.408538 4852 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.408551 4852 server.go:79] "Starting device plugin registration server" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.408993 4852 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.409012 4852 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.409151 4852 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.409279 4852 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.409295 4852 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.413950 4852 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.419128 4852 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.419196 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.419954 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.420008 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.420022 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.420203 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.420365 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.420406 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.421231 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.421256 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.421269 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.421276 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.421294 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.421304 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.421397 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.421446 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.421659 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.422383 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.422404 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.422428 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.422443 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.422430 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.422492 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.422575 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.422695 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.422733 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.423442 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.423469 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.423477 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.423481 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.423504 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.423519 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.423705 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.423799 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.423823 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.424539 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.424554 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.424562 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.424573 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.424597 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.424608 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.424669 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.424687 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.425383 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.425412 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.425438 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.466964 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="400ms" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480584 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480622 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480669 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480690 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480736 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480756 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480775 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480815 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480898 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480940 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480959 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.480980 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.481043 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.481094 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.481128 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.509537 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.513165 4852 
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.513207 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.513216 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.513239 4852 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.513635 4852 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.219:6443: connect: connection refused" node="crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582329 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582407 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582436 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582510 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582557 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582599 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582601 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582644 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582677 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582690 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582744 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582651 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582728 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582800 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582613 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582732 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582869 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582900 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582919 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582955 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582985 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.582979 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.583000 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.583048 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.583050 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.583076 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.583061 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.583136 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.583180 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.583321 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.713725 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.715196 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.715261 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.715279 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.715316 4852 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.715816 4852 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.219:6443: connect: connection refused" node="crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.747166 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.753096 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.776549 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-9f6b69387244611fc72bb9db177c6cd957d673df9c9e0489d8d6365459b7c795 WatchSource:0}: Error finding container 9f6b69387244611fc72bb9db177c6cd957d673df9c9e0489d8d6365459b7c795: Status 404 returned error can't find the container with id 9f6b69387244611fc72bb9db177c6cd957d673df9c9e0489d8d6365459b7c795 Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.776903 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-ef16e81323417b118d83e9060ec7fc48364c47f817e4f988f093c83b02294be3 WatchSource:0}: Error finding container ef16e81323417b118d83e9060ec7fc48364c47f817e4f988f093c83b02294be3: Status 404 returned error can't find the container with id ef16e81323417b118d83e9060ec7fc48364c47f817e4f988f093c83b02294be3 Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.778227 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.787897 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.789783 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-1043466e066e00bf4a72a07365be8cbd2da4aa4d461affd24f9a3446ef85211c WatchSource:0}: Error finding container 1043466e066e00bf4a72a07365be8cbd2da4aa4d461affd24f9a3446ef85211c: Status 404 returned error can't find the container with id 1043466e066e00bf4a72a07365be8cbd2da4aa4d461affd24f9a3446ef85211c Dec 01 20:04:40 crc kubenswrapper[4852]: I1201 20:04:40.792726 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:04:40 crc kubenswrapper[4852]: W1201 20:04:40.810801 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-0b2e4f0b59466693dc457dd5ff971fcccaaddfa15388f0808bddb6be47b3585c WatchSource:0}: Error finding container 0b2e4f0b59466693dc457dd5ff971fcccaaddfa15388f0808bddb6be47b3585c: Status 404 returned error can't find the container with id 0b2e4f0b59466693dc457dd5ff971fcccaaddfa15388f0808bddb6be47b3585c Dec 01 20:04:40 crc kubenswrapper[4852]: E1201 20:04:40.868095 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="800ms" Dec 01 20:04:41 crc kubenswrapper[4852]: W1201 20:04:41.069427 4852 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:04:41 crc kubenswrapper[4852]: E1201 20:04:41.069535 4852 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.219:6443: connect: connection refused" logger="UnhandledError" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.115963 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.120567 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.120647 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.120662 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.120690 4852 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 20:04:41 crc kubenswrapper[4852]: E1201 20:04:41.121075 4852 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.219:6443: connect: connection refused" node="crc" Dec 01 20:04:41 crc kubenswrapper[4852]: W1201 20:04:41.239033 4852 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:04:41 crc kubenswrapper[4852]: E1201 20:04:41.239119 4852 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.219:6443: connect: connection refused" logger="UnhandledError" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 
20:04:41.258159 4852 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.262145 4852 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 06:39:25.535868288 +0000 UTC Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.262227 4852 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 346h34m44.273644126s for next certificate rotation Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.325947 4852 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="82590b812b38038c8a75c44ccef03674505f5c7f6642613b62aa495d55183121" exitCode=0 Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.326040 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"82590b812b38038c8a75c44ccef03674505f5c7f6642613b62aa495d55183121"} Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.326144 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0b2e4f0b59466693dc457dd5ff971fcccaaddfa15388f0808bddb6be47b3585c"} Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.326283 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.327370 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.327400 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.327411 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.327990 4852 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="675f8ddf05b0c46a62452e39f19fa0066d40314c0441382b97b835d73bcf712e" exitCode=0 Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.328056 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"675f8ddf05b0c46a62452e39f19fa0066d40314c0441382b97b835d73bcf712e"} Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.328079 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"1043466e066e00bf4a72a07365be8cbd2da4aa4d461affd24f9a3446ef85211c"} Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.328141 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.328964 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.328991 4852 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.329001 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.330296 4852 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c" exitCode=0 Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.330399 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c"} Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.330528 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9f6b69387244611fc72bb9db177c6cd957d673df9c9e0489d8d6365459b7c795"} Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.330710 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.332909 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.332938 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.332956 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.334132 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5"} Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.334176 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ef16e81323417b118d83e9060ec7fc48364c47f817e4f988f093c83b02294be3"} Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.337898 4852 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0" exitCode=0 Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.337963 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0"} Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.338012 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b1a2fd685fd058cd91d70d4ae847ef5ee220a772a2d34522dd4df016f1f32f7c"} Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.338186 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 
20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.339320 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.339385 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.339411 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.342972 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.343778 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.343810 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.343823 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:41 crc kubenswrapper[4852]: W1201 20:04:41.369712 4852 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:04:41 crc kubenswrapper[4852]: E1201 20:04:41.369821 4852 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.219:6443: connect: connection refused" logger="UnhandledError" Dec 01 20:04:41 crc kubenswrapper[4852]: W1201 20:04:41.380078 4852 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:04:41 crc kubenswrapper[4852]: E1201 20:04:41.380180 4852 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.219:6443: connect: connection refused" logger="UnhandledError" Dec 01 20:04:41 crc kubenswrapper[4852]: E1201 20:04:41.669680 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="1.6s" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.921840 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.923252 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.923305 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.923318 4852 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:41 crc kubenswrapper[4852]: I1201 20:04:41.923358 4852 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.348791 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.348858 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.348879 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.349010 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.350502 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.350535 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.350550 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.354112 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.354150 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.354171 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.354189 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.354206 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 
20:04:42.354314 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.355283 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.355320 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.355337 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.357237 4852 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d20fc94f4fbec28f57b7190322cf0505189d50e8c7902f2448f7a3f53e255b12" exitCode=0 Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.357304 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d20fc94f4fbec28f57b7190322cf0505189d50e8c7902f2448f7a3f53e255b12"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.357478 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.358404 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.358439 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.358481 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.361322 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"8ff2e5b90958523977e4150eafee9e93676c4cadf56821d67af12044d1531e6b"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.361653 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.362787 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.362823 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.362841 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.364092 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"26525cc0279c54f152ce1e8e2a10e5f6ba46cb9292d80a6228bad5e2f8c5e0f1"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.364143 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b33bd0aefbe99db696de6ab52300a28393ffd0f4bab41c1b47c0f1b5239f2e25"} Dec 01 20:04:42 crc 
kubenswrapper[4852]: I1201 20:04:42.364164 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"67246c63582a7882b312291c0cf5ed6677e9cfa2008ae0a06a108b5b445a72dd"} Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.364317 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.365608 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.365649 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.365668 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:42 crc kubenswrapper[4852]: I1201 20:04:42.842541 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.370307 4852 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="241677e14eab0ce670ec2e00b3cbd12b59fc7b27fa43bea77eef2840483f567a" exitCode=0 Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.370552 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.370667 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"241677e14eab0ce670ec2e00b3cbd12b59fc7b27fa43bea77eef2840483f567a"} Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.370779 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.370918 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.371872 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.371911 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.371932 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.372405 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.372534 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.372584 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.372598 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.372759 4852 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.372771 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.508220 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.508402 4852 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.508489 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.509885 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.509929 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:43 crc kubenswrapper[4852]: I1201 20:04:43.509943 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:44 crc kubenswrapper[4852]: I1201 20:04:44.383821 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"487c56341dbb838a79f3e9339cc98abedbdbe164f512916fd41e74d18bb39ea2"} Dec 01 20:04:44 crc kubenswrapper[4852]: I1201 20:04:44.383891 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f4ba759515b845f4fdbd34fa45f8f476f7b4a3ca69621a78bd99223aa2eb4df4"} Dec 01 20:04:44 crc kubenswrapper[4852]: I1201 20:04:44.383917 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2d324586f5ac5b3b8bb1a699f6d2202afdc3ad1ba8c89e7611f00173747a594a"} Dec 01 20:04:44 crc kubenswrapper[4852]: I1201 20:04:44.383936 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0a4d9aa6f9f78cfacd8afa0106a4c6bfe2e358e5ca0eb9ccb2280850f17e350c"} Dec 01 20:04:44 crc kubenswrapper[4852]: I1201 20:04:44.383963 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:44 crc kubenswrapper[4852]: I1201 20:04:44.385226 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:44 crc kubenswrapper[4852]: I1201 20:04:44.385274 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:44 crc kubenswrapper[4852]: I1201 20:04:44.385287 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:45 crc kubenswrapper[4852]: I1201 20:04:45.393690 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bb3f9d7c435d7ecd3c2f3fe8f43aa7a8796aa281f6414a6419558547c960c41f"} Dec 01 20:04:45 crc kubenswrapper[4852]: I1201 20:04:45.393892 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 
20:04:45 crc kubenswrapper[4852]: I1201 20:04:45.395209 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:45 crc kubenswrapper[4852]: I1201 20:04:45.395257 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:45 crc kubenswrapper[4852]: I1201 20:04:45.395270 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:46 crc kubenswrapper[4852]: I1201 20:04:46.159572 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:04:46 crc kubenswrapper[4852]: I1201 20:04:46.159888 4852 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 20:04:46 crc kubenswrapper[4852]: I1201 20:04:46.159968 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:46 crc kubenswrapper[4852]: I1201 20:04:46.162029 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:46 crc kubenswrapper[4852]: I1201 20:04:46.162127 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:46 crc kubenswrapper[4852]: I1201 20:04:46.162165 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:46 crc kubenswrapper[4852]: I1201 20:04:46.396110 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:46 crc kubenswrapper[4852]: I1201 20:04:46.397442 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:46 crc kubenswrapper[4852]: I1201 20:04:46.397534 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:46 crc kubenswrapper[4852]: I1201 20:04:46.397555 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.311402 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.311675 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.313269 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.313336 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.313363 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.319239 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.361224 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 
20:04:47.398367 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.399637 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.399700 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.399723 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.581338 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.581619 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.583303 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.583380 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:47 crc kubenswrapper[4852]: I1201 20:04:47.583399 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:48 crc kubenswrapper[4852]: I1201 20:04:48.401283 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:48 crc kubenswrapper[4852]: I1201 20:04:48.402215 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:48 crc kubenswrapper[4852]: I1201 20:04:48.402253 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:48 crc kubenswrapper[4852]: I1201 20:04:48.402264 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:48 crc kubenswrapper[4852]: I1201 20:04:48.806698 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:04:48 crc kubenswrapper[4852]: I1201 20:04:48.806926 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:48 crc kubenswrapper[4852]: I1201 20:04:48.808360 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:48 crc kubenswrapper[4852]: I1201 20:04:48.808426 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:48 crc kubenswrapper[4852]: I1201 20:04:48.808484 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:49 crc kubenswrapper[4852]: I1201 20:04:49.629738 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:49 crc kubenswrapper[4852]: I1201 20:04:49.629997 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:49 crc kubenswrapper[4852]: I1201 20:04:49.631562 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 20:04:49 crc kubenswrapper[4852]: I1201 20:04:49.631625 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:49 crc kubenswrapper[4852]: I1201 20:04:49.631649 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:50 crc kubenswrapper[4852]: E1201 20:04:50.414101 4852 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 01 20:04:50 crc kubenswrapper[4852]: I1201 20:04:50.768392 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:50 crc kubenswrapper[4852]: I1201 20:04:50.768631 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:50 crc kubenswrapper[4852]: I1201 20:04:50.769927 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:50 crc kubenswrapper[4852]: I1201 20:04:50.769987 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:50 crc kubenswrapper[4852]: I1201 20:04:50.770004 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:51 crc kubenswrapper[4852]: E1201 20:04:51.925096 4852 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 01 20:04:52 crc kubenswrapper[4852]: I1201 20:04:52.259689 4852 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 01 20:04:52 crc kubenswrapper[4852]: I1201 20:04:52.630674 4852 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 01 20:04:52 crc kubenswrapper[4852]: I1201 20:04:52.630795 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 20:04:53 crc kubenswrapper[4852]: E1201 20:04:53.271692 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 01 20:04:53 crc kubenswrapper[4852]: I1201 20:04:53.502537 4852 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" 
cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 01 20:04:53 crc kubenswrapper[4852]: I1201 20:04:53.502631 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 01 20:04:53 crc kubenswrapper[4852]: I1201 20:04:53.508308 4852 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 01 20:04:53 crc kubenswrapper[4852]: I1201 20:04:53.508398 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 01 20:04:53 crc kubenswrapper[4852]: I1201 20:04:53.526182 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:53 crc kubenswrapper[4852]: I1201 20:04:53.527474 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:53 crc kubenswrapper[4852]: I1201 20:04:53.527521 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:53 crc kubenswrapper[4852]: I1201 20:04:53.527537 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:53 crc kubenswrapper[4852]: I1201 20:04:53.527568 4852 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 20:04:55.291753 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 20:04:55.292045 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 20:04:55.293962 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 20:04:55.294030 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 20:04:55.294054 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 20:04:55.325112 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 20:04:55.417884 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 20:04:55.419273 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 20:04:55.419367 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 
20:04:55.419389 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:55 crc kubenswrapper[4852]: I1201 20:04:55.439384 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.165101 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.165366 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.166914 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.167012 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.167033 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.169116 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.419947 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.420001 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.421432 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.421516 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.421536 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.421611 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.421675 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:56 crc kubenswrapper[4852]: I1201 20:04:56.421694 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:04:57 crc kubenswrapper[4852]: I1201 20:04:57.367494 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:57 crc kubenswrapper[4852]: I1201 20:04:57.368013 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:04:57 crc kubenswrapper[4852]: I1201 20:04:57.369482 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:04:57 crc kubenswrapper[4852]: I1201 20:04:57.369521 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:04:57 crc kubenswrapper[4852]: I1201 20:04:57.369530 4852 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.497628 4852 trace.go:236] Trace[1208658080]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 20:04:43.847) (total time: 14650ms): Dec 01 20:04:58 crc kubenswrapper[4852]: Trace[1208658080]: ---"Objects listed" error: 14650ms (20:04:58.497) Dec 01 20:04:58 crc kubenswrapper[4852]: Trace[1208658080]: [14.650505685s] [14.650505685s] END Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.497684 4852 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.498930 4852 trace.go:236] Trace[423116844]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 20:04:44.309) (total time: 14189ms): Dec 01 20:04:58 crc kubenswrapper[4852]: Trace[423116844]: ---"Objects listed" error: 14189ms (20:04:58.498) Dec 01 20:04:58 crc kubenswrapper[4852]: Trace[423116844]: [14.189216489s] [14.189216489s] END Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.498965 4852 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.500264 4852 trace.go:236] Trace[1273160233]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 20:04:44.022) (total time: 14477ms): Dec 01 20:04:58 crc kubenswrapper[4852]: Trace[1273160233]: ---"Objects listed" error: 14477ms (20:04:58.500) Dec 01 20:04:58 crc kubenswrapper[4852]: Trace[1273160233]: [14.477728334s] [14.477728334s] END Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.500290 4852 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.500985 4852 trace.go:236] Trace[800920447]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 20:04:43.737) (total time: 14763ms): Dec 01 20:04:58 crc kubenswrapper[4852]: Trace[800920447]: ---"Objects listed" error: 14763ms (20:04:58.500) Dec 01 20:04:58 crc kubenswrapper[4852]: Trace[800920447]: [14.763824913s] [14.763824913s] END Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.501003 4852 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.502129 4852 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 01 20:04:58 crc kubenswrapper[4852]: E1201 20:04:58.532436 4852 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.551552 4852 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40186->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.551604 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 
192.168.126.11:40186->192.168.126.11:17697: read: connection reset by peer" Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.551645 4852 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40198->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.551728 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40198->192.168.126.11:17697: read: connection reset by peer" Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.552125 4852 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.552153 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.808096 4852 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 01 20:04:58 crc kubenswrapper[4852]: I1201 20:04:58.808163 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.264271 4852 apiserver.go:52] "Watching apiserver" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.268238 4852 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.268752 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.269430 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.269545 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.269730 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.269747 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.269853 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.270174 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.270323 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.270330 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.270373 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.273534 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.273681 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.274073 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.274444 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.274536 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.274786 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.274789 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.275017 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.276962 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.328942 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.352186 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.362630 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.363068 4852 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.375939 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.386545 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.396445 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.403592 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407575 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407619 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407646 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407668 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407688 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407706 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407729 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 20:04:59 crc kubenswrapper[4852]: 
I1201 20:04:59.407784 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407821 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407842 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407864 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407886 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407907 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407930 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407950 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407971 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.407993 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 
01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408015 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408036 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408059 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408081 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408101 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408122 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408143 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408164 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408190 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408219 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod 
\"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408239 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408258 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408286 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408306 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408325 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408348 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408386 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408425 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408446 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408481 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") 
pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408503 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408523 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408543 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408564 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408586 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408609 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408634 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408676 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408697 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408720 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408741 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408762 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408787 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408782 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408844 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408834 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408900 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408924 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408954 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408973 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.408977 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409019 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409041 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409060 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409077 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409082 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409094 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409097 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409113 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409122 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409132 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409230 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409287 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409310 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409326 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409331 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409340 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409379 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409417 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409486 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409517 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409540 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409568 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409597 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409627 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409984 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410015 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410040 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410069 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410092 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410115 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410138 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410159 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410177 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410203 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410222 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410242 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410262 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410284 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410301 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410319 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410337 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410363 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410383 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410418 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410441 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410489 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410511 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410540 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410570 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410594 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410614 4852 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410635 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410655 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410727 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410747 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410765 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410783 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410804 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410826 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410847 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: 
\"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410867 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410885 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410904 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410923 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410950 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410974 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410994 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411013 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411034 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411057 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod 
\"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411078 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411101 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411125 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411152 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411175 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411197 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411222 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411254 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411286 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411314 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411336 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411357 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411379 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411402 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411425 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411462 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411490 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411512 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411534 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411554 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411581 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411609 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411632 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411654 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411676 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411698 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411719 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411740 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411762 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411784 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411814 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411840 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411861 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411882 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411904 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411923 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412259 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412294 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409482 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409552 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409580 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.409741 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410014 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410046 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410231 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.410336 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411089 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411151 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.411373 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412068 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412755 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412209 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412222 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412481 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412834 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412856 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412916 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.413023 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.413029 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.413097 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.413120 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.413286 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.413300 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.413330 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.415675 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.416125 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.416592 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.416683 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412871 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.413372 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.419698 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.417803 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.419887 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.417635 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.419938 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420052 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420100 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420135 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420168 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420196 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420215 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420253 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420409 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420596 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420618 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420727 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420765 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420912 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.420282 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421015 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421038 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421067 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421082 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.419945 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421104 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421114 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421132 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421112 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421512 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421536 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421545 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421554 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421613 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421632 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421656 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421681 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421697 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421741 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421760 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421778 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421796 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421751 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421814 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421837 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421857 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421880 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421908 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421927 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421945 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421961 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421978 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422022 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422040 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422056 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422074 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422090 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422108 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422171 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422195 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422376 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422404 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " 
pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422424 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422445 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422476 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422494 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422514 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422535 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422553 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422573 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422593 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" 
(UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422613 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422661 4852 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422673 4852 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422688 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422698 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422708 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422723 4852 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422733 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422743 4852 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422754 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422764 4852 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422775 4852 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422785 4852 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421878 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.423407 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422832 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.421741 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422123 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422201 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422339 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422430 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.423610 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.423721 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422534 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422646 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422664 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.422711 4852 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.423863 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:04:59.923825124 +0000 UTC m=+19.850906541 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422805 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.422521 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.423238 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.423145 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424116 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424186 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424513 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424623 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424645 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424644 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424661 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424682 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424658 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424735 4852 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424801 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424809 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424823 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424841 4852 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424860 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424861 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424876 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424893 4852 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424905 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424911 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424924 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424918 4852 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424967 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424984 4852 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425001 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425016 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425033 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425048 4852 
reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425061 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425077 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425076 4852 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425092 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425109 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425125 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425140 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425154 4852 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425168 4852 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425182 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425196 4852 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425208 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425223 4852 reconciler_common.go:293] "Volume 
detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425235 4852 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425248 4852 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425260 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425273 4852 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425286 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425298 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425310 4852 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425324 4852 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425337 4852 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425350 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425365 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425379 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425393 4852 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425409 4852 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425425 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425530 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425545 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425108 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.424982 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425136 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425212 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425353 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.425522 4852 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.425650 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:04:59.925635194 +0000 UTC m=+19.852716721 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425761 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.425768 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.426006 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.426038 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.426037 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.426067 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.426090 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.426220 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.426966 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.427057 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.427268 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.427414 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.427439 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.427724 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.427820 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.428117 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.428222 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.428233 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.428413 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.428529 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.428744 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.428804 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.428884 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.429107 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.429151 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.429320 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.429332 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.429496 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.429498 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.429929 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.430161 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.430394 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.430580 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.430842 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.431306 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.431564 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.431834 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.431948 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.412976 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.432101 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.433093 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.433193 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.433384 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.433668 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). 
InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.433829 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.434071 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.434546 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.434791 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.435060 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.435139 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.435436 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.435444 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.413180 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.435511 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.436028 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.436159 4852 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995" exitCode=255 Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.436191 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995"} Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.436707 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.436724 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.436953 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.437235 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.437650 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.437771 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.437801 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.437909 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.438286 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.438318 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.438337 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.438654 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.439021 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.439062 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.439437 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.439619 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.439660 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.439815 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.440028 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.440047 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.440060 4852 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.440106 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 20:04:59.940091392 +0000 UTC m=+19.867172799 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.440382 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.440602 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:04:59.940594109 +0000 UTC m=+19.867675526 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.440815 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.441048 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.441049 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.441257 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.441480 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.441492 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.441594 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.441612 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.441622 4852 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.441752 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-01 20:04:59.941744156 +0000 UTC m=+19.868825573 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.441716 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.442088 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.442127 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.442297 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.442433 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.442479 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.442668 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.442694 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.442717 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.443007 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.443023 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.443307 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.443342 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.443511 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.443521 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.443565 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.443642 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.444057 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.444073 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.437597 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.443542 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.444391 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.444403 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.444907 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.444952 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.450612 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.453098 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.454329 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.457610 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.458555 4852 scope.go:117] "RemoveContainer" containerID="0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.457408 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.460814 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.463299 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.467443 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.469046 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.478022 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.480672 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.486139 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.495170 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.502974 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528328 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528390 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528477 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528492 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528503 4852 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\""
Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528513 4852 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528522 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\""
Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528536 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528548 4852 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
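Every status patch in this window fails the same way: the API server cannot reach the pod.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, whose serving pod (network-node-identity-vrzqb, its webhook-cert and kube-api-access-s2kz5 volumes mounted just above) is itself still being recreated. A minimal sketch, run on the node since the endpoint is a localhost listener there (address taken from the errors above), probing whether anything is accepting connections yet:

    package main

    import (
        "fmt"
        "net"
        "time"
    )

    func main() {
        // 127.0.0.1:9743 comes from the "failed calling webhook" entries above.
        conn, err := net.DialTimeout("tcp", "127.0.0.1:9743", 2*time.Second)
        if err != nil {
            // "connection refused" here matches the status_manager failures:
            // the webhook server is not listening yet.
            fmt.Println("webhook endpoint not reachable:", err)
            return
        }
        conn.Close()
        fmt.Println("webhook endpoint accepting TCP connections")
    }

Once the network-node-identity pod's webhook container is running, the same probe succeeds and the deferred status patches go through.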
volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528559 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528569 4852 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528580 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528591 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528600 4852 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528611 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528620 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528630 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528640 4852 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528651 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528662 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528674 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528685 4852 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528695 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528708 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528719 4852 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528729 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528740 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528751 4852 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528762 4852 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528773 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528786 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528798 4852 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528808 4852 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528819 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528817 
4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528830 4852 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528879 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528889 4852 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528899 4852 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528910 4852 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528919 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528929 4852 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528937 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528939 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528947 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528986 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.528998 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529009 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529021 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529032 4852 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529042 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529053 4852 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529064 4852 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529075 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529085 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529095 4852 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529105 4852 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529115 4852 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529126 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529137 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on 
node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529149 4852 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529160 4852 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529170 4852 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529181 4852 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529192 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529202 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529212 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529222 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529244 4852 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529254 4852 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529264 4852 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529273 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529283 4852 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529293 4852 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529303 4852 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529313 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529324 4852 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529335 4852 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529345 4852 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529354 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529365 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529376 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529466 4852 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529480 4852 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529491 4852 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529502 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 01 
20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529514 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529525 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529535 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529545 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529555 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529566 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529577 4852 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529588 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529606 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529621 4852 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529631 4852 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529641 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529652 4852 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc 
kubenswrapper[4852]: I1201 20:04:59.529664 4852 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529674 4852 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529686 4852 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529696 4852 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529707 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529719 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529730 4852 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529740 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529751 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529761 4852 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529773 4852 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529783 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529794 4852 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 
01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529804 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529814 4852 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529826 4852 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529836 4852 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529846 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529856 4852 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529867 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529877 4852 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529888 4852 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529898 4852 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529908 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529919 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529931 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath 
\"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529943 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529955 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529965 4852 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529975 4852 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529986 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.529996 4852 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.530007 4852 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.530018 4852 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.530031 4852 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.530041 4852 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.530051 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.530063 4852 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.530074 4852 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" 
Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.530084 4852 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.596244 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.604699 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 20:04:59 crc kubenswrapper[4852]: W1201 20:04:59.608593 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-70f9374c399a7c09adf7f9a0a51624b9f85f30bfacd8c857a70cf78ebadc1d72 WatchSource:0}: Error finding container 70f9374c399a7c09adf7f9a0a51624b9f85f30bfacd8c857a70cf78ebadc1d72: Status 404 returned error can't find the container with id 70f9374c399a7c09adf7f9a0a51624b9f85f30bfacd8c857a70cf78ebadc1d72 Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.612758 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 20:04:59 crc kubenswrapper[4852]: W1201 20:04:59.617965 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-30bd9b7fa5a5d739eefaf1947c8252dbb2c99ceba469fd6bc698024e556a5b27 WatchSource:0}: Error finding container 30bd9b7fa5a5d739eefaf1947c8252dbb2c99ceba469fd6bc698024e556a5b27: Status 404 returned error can't find the container with id 30bd9b7fa5a5d739eefaf1947c8252dbb2c99ceba469fd6bc698024e556a5b27 Dec 01 20:04:59 crc kubenswrapper[4852]: W1201 20:04:59.627101 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-5d2c75f7ec2f48784bce9c4824ccf9694c67731871a9a83c0f7039eb4a37ddd9 WatchSource:0}: Error finding container 5d2c75f7ec2f48784bce9c4824ccf9694c67731871a9a83c0f7039eb4a37ddd9: Status 404 returned error can't find the container with id 5d2c75f7ec2f48784bce9c4824ccf9694c67731871a9a83c0f7039eb4a37ddd9 Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.628697 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-dgxbk"] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.629032 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-dgxbk" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.629996 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-j25pb"] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.630314 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-hjkrm"] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.630398 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.630571 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.630689 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-cjd9b"] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.634097 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-j2q4c"] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.634194 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.635500 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.635920 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.636389 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.638956 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.639063 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.639195 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.639265 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.639605 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.639952 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.639950 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.640115 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.640119 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-727gr"] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.641287 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.641355 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.642301 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.642412 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.642613 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.643105 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.643224 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.643525 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.643655 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.643762 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.643866 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.643882 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.643926 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.644154 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.645107 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.646553 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.652121 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.657543 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.670729 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.691471 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.710736 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.726640 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731425 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-cni-binary-copy\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731487 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-config\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731504 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-slash\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731523 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-cnibin\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731540 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-run-k8s-cni-cncf-io\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731554 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-etc-kubernetes\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731569 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"os-release\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-os-release\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731584 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-netd\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731598 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc2s6\" (UniqueName: \"kubernetes.io/projected/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-kube-api-access-lc2s6\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731625 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-system-cni-dir\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731642 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-run-multus-certs\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731660 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-os-release\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731712 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh8kn\" (UniqueName: \"kubernetes.io/projected/c16363da-fb1f-4f6f-af39-70bf7783f3fe-kube-api-access-lh8kn\") pod \"node-resolver-dgxbk\" (UID: \"c16363da-fb1f-4f6f-af39-70bf7783f3fe\") " pod="openshift-dns/node-resolver-dgxbk" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731755 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csjlv\" (UniqueName: \"kubernetes.io/projected/e823f9e3-954c-4254-9f06-893905a28152-kube-api-access-csjlv\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731779 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-system-cni-dir\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 
crc kubenswrapper[4852]: I1201 20:04:59.731811 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h7nz\" (UniqueName: \"kubernetes.io/projected/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-kube-api-access-7h7nz\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731832 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-var-lib-cni-bin\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731850 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-log-socket\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731867 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-cnibin\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731890 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-var-lib-openvswitch\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731905 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-etc-openvswitch\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731924 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731942 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-multus-cni-dir\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731956 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-run-netns\") pod \"multus-cjd9b\" (UID: 
\"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.731970 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znznr\" (UniqueName: \"kubernetes.io/projected/6c477f33-3400-4c50-b2fc-e9306088770e-kube-api-access-znznr\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732003 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-multus-conf-dir\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732020 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-openvswitch\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732039 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-env-overrides\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732076 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-ovn\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732096 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732138 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-hostroot\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732173 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovn-node-metrics-cert\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732191 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e823f9e3-954c-4254-9f06-893905a28152-proxy-tls\") pod 
\"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732205 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732227 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-var-lib-kubelet\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732242 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-kubelet\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732273 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732316 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-script-lib\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732336 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9khtx\" (UniqueName: \"kubernetes.io/projected/c6dd12e6-57a6-404a-8138-66e9cfa18d00-kube-api-access-9khtx\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732373 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e823f9e3-954c-4254-9f06-893905a28152-mcd-auth-proxy-config\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732404 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6c477f33-3400-4c50-b2fc-e9306088770e-cni-binary-copy\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 
20:04:59.732424 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-var-lib-cni-multus\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732441 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-node-log\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732497 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-ovn-kubernetes\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732579 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-systemd-units\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732626 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-netns\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732647 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-systemd\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732689 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-multus-socket-dir-parent\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732711 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6c477f33-3400-4c50-b2fc-e9306088770e-multus-daemon-config\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732733 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-bin\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732750 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c16363da-fb1f-4f6f-af39-70bf7783f3fe-hosts-file\") pod \"node-resolver-dgxbk\" (UID: \"c16363da-fb1f-4f6f-af39-70bf7783f3fe\") " pod="openshift-dns/node-resolver-dgxbk" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.732769 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e823f9e3-954c-4254-9f06-893905a28152-rootfs\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.734465 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.749690 4852 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.764496 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.775683 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.788511 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.800757 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.821087 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.832686 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834003 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e823f9e3-954c-4254-9f06-893905a28152-mcd-auth-proxy-config\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834054 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6c477f33-3400-4c50-b2fc-e9306088770e-cni-binary-copy\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834079 4852 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-var-lib-cni-multus\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834102 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-node-log\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834123 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-ovn-kubernetes\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834145 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-systemd-units\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834162 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-netns\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834171 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-var-lib-cni-multus\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834180 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-systemd\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834210 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-systemd\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834235 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-multus-socket-dir-parent\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834253 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/6c477f33-3400-4c50-b2fc-e9306088770e-multus-daemon-config\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834256 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-node-log\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834271 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-bin\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834291 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c16363da-fb1f-4f6f-af39-70bf7783f3fe-hosts-file\") pod \"node-resolver-dgxbk\" (UID: \"c16363da-fb1f-4f6f-af39-70bf7783f3fe\") " pod="openshift-dns/node-resolver-dgxbk" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834313 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e823f9e3-954c-4254-9f06-893905a28152-rootfs\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834533 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-cni-binary-copy\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834548 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-config\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834548 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-netns\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834562 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-slash\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834290 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-ovn-kubernetes\") pod \"ovnkube-node-727gr\" (UID: 
\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834583 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-cnibin\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834599 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-run-k8s-cni-cncf-io\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834613 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-etc-kubernetes\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834630 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-os-release\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834637 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-bin\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834643 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-netd\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834684 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e823f9e3-954c-4254-9f06-893905a28152-rootfs\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834690 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc2s6\" (UniqueName: \"kubernetes.io/projected/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-kube-api-access-lc2s6\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834719 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-system-cni-dir\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834734 4852 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-run-multus-certs\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834744 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c16363da-fb1f-4f6f-af39-70bf7783f3fe-hosts-file\") pod \"node-resolver-dgxbk\" (UID: \"c16363da-fb1f-4f6f-af39-70bf7783f3fe\") " pod="openshift-dns/node-resolver-dgxbk" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834750 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-os-release\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834762 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-multus-socket-dir-parent\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834789 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh8kn\" (UniqueName: \"kubernetes.io/projected/c16363da-fb1f-4f6f-af39-70bf7783f3fe-kube-api-access-lh8kn\") pod \"node-resolver-dgxbk\" (UID: \"c16363da-fb1f-4f6f-af39-70bf7783f3fe\") " pod="openshift-dns/node-resolver-dgxbk" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834793 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-os-release\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834665 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-netd\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834827 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csjlv\" (UniqueName: \"kubernetes.io/projected/e823f9e3-954c-4254-9f06-893905a28152-kube-api-access-csjlv\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834854 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-system-cni-dir\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834879 4852 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-7h7nz\" (UniqueName: \"kubernetes.io/projected/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-kube-api-access-7h7nz\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834902 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-var-lib-cni-bin\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834922 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-log-socket\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834943 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-cnibin\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834959 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-system-cni-dir\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834965 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-var-lib-openvswitch\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834986 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-etc-openvswitch\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835008 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835019 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e823f9e3-954c-4254-9f06-893905a28152-mcd-auth-proxy-config\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835030 4852 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-multus-cni-dir\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.834313 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-systemd-units\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835052 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-run-netns\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835056 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-system-cni-dir\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835074 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znznr\" (UniqueName: \"kubernetes.io/projected/6c477f33-3400-4c50-b2fc-e9306088770e-kube-api-access-znznr\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835081 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-run-multus-certs\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835095 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-multus-conf-dir\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835103 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-var-lib-openvswitch\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835122 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-openvswitch\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835135 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6c477f33-3400-4c50-b2fc-e9306088770e-multus-daemon-config\") pod \"multus-cjd9b\" (UID: 
\"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835141 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-env-overrides\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835163 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-ovn\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835177 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-slash\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835185 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-cni-binary-copy\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835180 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835208 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-etc-openvswitch\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835232 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-etc-kubernetes\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835234 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-hostroot\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835251 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-hostroot\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835255 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-var-lib-cni-bin\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835260 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovn-node-metrics-cert\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835276 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e823f9e3-954c-4254-9f06-893905a28152-proxy-tls\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835286 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-log-socket\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835257 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-multus-cni-dir\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835309 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-cnibin\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835299 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835097 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-cnibin\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835341 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-run-netns\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835341 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-var-lib-kubelet\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.835358 4852 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835364 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-kubelet\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835379 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835300 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-os-release\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.835392 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs podName:7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a nodeName:}" failed. No retries permitted until 2025-12-01 20:05:00.335382614 +0000 UTC m=+20.262464031 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs") pod "network-metrics-daemon-j2q4c" (UID: "7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835388 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-openvswitch\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835408 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-script-lib\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835430 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9khtx\" (UniqueName: \"kubernetes.io/projected/c6dd12e6-57a6-404a-8138-66e9cfa18d00-kube-api-access-9khtx\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835434 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835545 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-multus-conf-dir\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835570 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-kubelet\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835364 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-var-lib-kubelet\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.835623 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.837530 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" 
(UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-config\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.839117 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.839333 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-env-overrides\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.839402 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-ovn\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.839477 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6c477f33-3400-4c50-b2fc-e9306088770e-cni-binary-copy\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.839549 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6c477f33-3400-4c50-b2fc-e9306088770e-host-run-k8s-cni-cncf-io\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.843652 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-script-lib\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.844249 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e823f9e3-954c-4254-9f06-893905a28152-proxy-tls\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.845303 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovn-node-metrics-cert\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.859203 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.862982 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc2s6\" (UniqueName: \"kubernetes.io/projected/0a2eec37-e5b6-45bc-9e83-33be653a5dd7-kube-api-access-lc2s6\") pod \"multus-additional-cni-plugins-hjkrm\" (UID: \"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\") " pod="openshift-multus/multus-additional-cni-plugins-hjkrm" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.863082 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znznr\" (UniqueName: \"kubernetes.io/projected/6c477f33-3400-4c50-b2fc-e9306088770e-kube-api-access-znznr\") pod \"multus-cjd9b\" (UID: \"6c477f33-3400-4c50-b2fc-e9306088770e\") " pod="openshift-multus/multus-cjd9b" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.863544 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9khtx\" (UniqueName: \"kubernetes.io/projected/c6dd12e6-57a6-404a-8138-66e9cfa18d00-kube-api-access-9khtx\") pod \"ovnkube-node-727gr\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") " pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.865598 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h7nz\" (UniqueName: \"kubernetes.io/projected/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-kube-api-access-7h7nz\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.869446 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csjlv\" (UniqueName: \"kubernetes.io/projected/e823f9e3-954c-4254-9f06-893905a28152-kube-api-access-csjlv\") pod \"machine-config-daemon-j25pb\" (UID: \"e823f9e3-954c-4254-9f06-893905a28152\") " pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.869685 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh8kn\" (UniqueName: \"kubernetes.io/projected/c16363da-fb1f-4f6f-af39-70bf7783f3fe-kube-api-access-lh8kn\") pod \"node-resolver-dgxbk\" (UID: \"c16363da-fb1f-4f6f-af39-70bf7783f3fe\") " pod="openshift-dns/node-resolver-dgxbk" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.871384 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.883266 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01
T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.901370 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.910652 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.923050 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.932694 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.935889 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.935924 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.936021 4852 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.936067 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:00.936054502 +0000 UTC m=+20.863135919 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.936257 4852 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: E1201 20:04:59.936314 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:00.93629869 +0000 UTC m=+20.863380107 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.944872 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.952000 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-dgxbk" Dec 01 20:04:59 crc kubenswrapper[4852]: W1201 20:04:59.963746 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc16363da_fb1f_4f6f_af39_70bf7783f3fe.slice/crio-0cbec027c0f34531d026d71062d8aba2547c32464fdf26916510420bedfd8303 WatchSource:0}: Error finding container 0cbec027c0f34531d026d71062d8aba2547c32464fdf26916510420bedfd8303: Status 404 returned error can't find the container with id 0cbec027c0f34531d026d71062d8aba2547c32464fdf26916510420bedfd8303 Dec 01 20:04:59 crc kubenswrapper[4852]: I1201 20:04:59.963861 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.037056 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.037222 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.037265 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.037369 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.037383 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.037393 4852 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.037436 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:01.037420783 +0000 UTC m=+20.964502200 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.037739 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:05:01.037730913 +0000 UTC m=+20.964812330 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.037787 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.037797 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.037804 4852 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.037824 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:01.037818316 +0000 UTC m=+20.964899733 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.047341 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-j25pb"
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.055684 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-hjkrm"
Dec 01 20:05:00 crc kubenswrapper[4852]: W1201 20:05:00.056655 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode823f9e3_954c_4254_9f06_893905a28152.slice/crio-e6fb045626d300b241e8d4108afdebba87d62adbc0ad07f9639b150af250666a WatchSource:0}: Error finding container e6fb045626d300b241e8d4108afdebba87d62adbc0ad07f9639b150af250666a: Status 404 returned error can't find the container with id e6fb045626d300b241e8d4108afdebba87d62adbc0ad07f9639b150af250666a
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.066438 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-cjd9b"
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.071865 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-727gr"
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.234225 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-5kxfk"]
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.234920 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-5kxfk"
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.237798 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.237986 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.238097 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.238151 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.255756 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.267283 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.286197 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.302305 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.317558 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01
T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.320504 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.320690 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.327375 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.328221 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.329394 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.330211 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.332625 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.333254 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.333914 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.335029 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.336088 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.336822 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.338107 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.340022 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.340479 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.340529 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/44f53bdc-e742-4661-a6b2-967f6847ade6-host\") pod \"node-ca-5kxfk\" (UID: \"44f53bdc-e742-4661-a6b2-967f6847ade6\") " pod="openshift-image-registry/node-ca-5kxfk" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.340573 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pttcf\" (UniqueName: \"kubernetes.io/projected/44f53bdc-e742-4661-a6b2-967f6847ade6-kube-api-access-pttcf\") pod \"node-ca-5kxfk\" (UID: \"44f53bdc-e742-4661-a6b2-967f6847ade6\") " pod="openshift-image-registry/node-ca-5kxfk" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.340599 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/44f53bdc-e742-4661-a6b2-967f6847ade6-serviceca\") pod \"node-ca-5kxfk\" (UID: \"44f53bdc-e742-4661-a6b2-967f6847ade6\") " pod="openshift-image-registry/node-ca-5kxfk" Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.340641 4852 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.340690 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs podName:7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a nodeName:}" failed. No retries permitted until 2025-12-01 20:05:01.340676068 +0000 UTC m=+21.267757485 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs") pod "network-metrics-daemon-j2q4c" (UID: "7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.341040 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.341734 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.342222 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.343178 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.343712 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.344630 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.345049 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.345594 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.346577 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.347002 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.351606 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.352041 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.352819 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 01 
20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.353243 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.353918 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.354606 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.356179 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.356825 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.356901 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.357338 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.358165 4852 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.358256 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 01 
20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.359846 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.360963 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.361387 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.386377 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.391374 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.392209 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.392923 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.402699 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.424753 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.447391 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pttcf\" (UniqueName: \"kubernetes.io/projected/44f53bdc-e742-4661-a6b2-967f6847ade6-kube-api-access-pttcf\") pod \"node-ca-5kxfk\" (UID: \"44f53bdc-e742-4661-a6b2-967f6847ade6\") " pod="openshift-image-registry/node-ca-5kxfk" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.447437 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/44f53bdc-e742-4661-a6b2-967f6847ade6-serviceca\") pod \"node-ca-5kxfk\" (UID: \"44f53bdc-e742-4661-a6b2-967f6847ade6\") " pod="openshift-image-registry/node-ca-5kxfk" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.447514 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/44f53bdc-e742-4661-a6b2-967f6847ade6-host\") pod \"node-ca-5kxfk\" (UID: \"44f53bdc-e742-4661-a6b2-967f6847ade6\") " pod="openshift-image-registry/node-ca-5kxfk" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.447578 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/44f53bdc-e742-4661-a6b2-967f6847ade6-host\") pod \"node-ca-5kxfk\" (UID: \"44f53bdc-e742-4661-a6b2-967f6847ade6\") " pod="openshift-image-registry/node-ca-5kxfk" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.447740 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.448656 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/44f53bdc-e742-4661-a6b2-967f6847ade6-serviceca\") pod \"node-ca-5kxfk\" (UID: \"44f53bdc-e742-4661-a6b2-967f6847ade6\") " pod="openshift-image-registry/node-ca-5kxfk" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.454538 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e" exitCode=0 Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.459781 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.460356 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 
01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.460562 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.461131 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.461785 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.462435 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.464131 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.464661 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.465231 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.465958 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.467779 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.468354 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.468866 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.469886 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.470409 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.474923 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.486216 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pttcf\" (UniqueName: \"kubernetes.io/projected/44f53bdc-e742-4661-a6b2-967f6847ade6-kube-api-access-pttcf\") pod \"node-ca-5kxfk\" (UID: \"44f53bdc-e742-4661-a6b2-967f6847ade6\") " pod="openshift-image-registry/node-ca-5kxfk" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.495263 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.513597 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.524287 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.541731 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.555336 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.559026 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.559753 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560237 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560279 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560293 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560304 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" 
event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"e6fb045626d300b241e8d4108afdebba87d62adbc0ad07f9639b150af250666a"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560312 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dgxbk" event={"ID":"c16363da-fb1f-4f6f-af39-70bf7783f3fe","Type":"ContainerStarted","Data":"db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560322 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dgxbk" event={"ID":"c16363da-fb1f-4f6f-af39-70bf7783f3fe","Type":"ContainerStarted","Data":"0cbec027c0f34531d026d71062d8aba2547c32464fdf26916510420bedfd8303"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560332 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" event={"ID":"0a2eec37-e5b6-45bc-9e83-33be653a5dd7","Type":"ContainerStarted","Data":"99d828291ee133cbadf1a47b205ace03d36ad5280e41f35caefd603331fd4a01"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560343 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560380 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"18c4fac82d1f69189091ea329e5d88e1fa7d94498be36da4e3a3182100227822"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560406 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"5d2c75f7ec2f48784bce9c4824ccf9694c67731871a9a83c0f7039eb4a37ddd9"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560417 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560427 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560436 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"30bd9b7fa5a5d739eefaf1947c8252dbb2c99ceba469fd6bc698024e556a5b27"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560445 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560468 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"70f9374c399a7c09adf7f9a0a51624b9f85f30bfacd8c857a70cf78ebadc1d72"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560480 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560492 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-cjd9b" event={"ID":"6c477f33-3400-4c50-b2fc-e9306088770e","Type":"ContainerStarted","Data":"67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.560504 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-cjd9b" event={"ID":"6c477f33-3400-4c50-b2fc-e9306088770e","Type":"ContainerStarted","Data":"d0f52af1cf14d60277f4808243fa0821cdb1188e7e2c29c84a119134d6fb0d86"} Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.569608 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.585196 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.595614 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-5kxfk" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.601063 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.
168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: W1201 20:05:00.619377 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44f53bdc_e742_4661_a6b2_967f6847ade6.slice/crio-24bdb41313d2d6dd52d01b6090f753999d83bd946974d0f2fe75cf6b1966a505 WatchSource:0}: Error finding container 24bdb41313d2d6dd52d01b6090f753999d83bd946974d0f2fe75cf6b1966a505: Status 404 returned error can't find the container with id 24bdb41313d2d6dd52d01b6090f753999d83bd946974d0f2fe75cf6b1966a505 Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.622556 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.643678 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.670194 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.687513 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.716779 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.772695 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.811897 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.847508 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.887393 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.912520 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.952278 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.952334 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.952521 4852 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.952580 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:02.952562631 +0000 UTC m=+22.879644048 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.952943 4852 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:05:00 crc kubenswrapper[4852]: E1201 20:05:00.952981 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:02.952971224 +0000 UTC m=+22.880052641 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.965747 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117
ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:00 crc kubenswrapper[4852]: I1201 20:05:00.997880 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.037051 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.053174 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.053302 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.053368 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:05:03.053341292 +0000 UTC m=+22.980422709 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.053402 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.053418 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.053428 4852 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.053490 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:03.053477157 +0000 UTC m=+22.980558574 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.053492 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.053668 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.053681 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.053691 4852 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.053727 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:03.053720085 +0000 UTC m=+22.980801502 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.082348 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z 
is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.121859 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.153475 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.194003 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.232571 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.273371 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.316892 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.318882 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.318936 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.318893 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.319030 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.319111 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.319215 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.354403 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f67
13d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.355707 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.355866 4852 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.355920 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs podName:7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a nodeName:}" failed. No retries permitted until 2025-12-01 20:05:03.355903804 +0000 UTC m=+23.282985221 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs") pod "network-metrics-daemon-j2q4c" (UID: "7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.401141 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.442082 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.468237 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-5kxfk" event={"ID":"44f53bdc-e742-4661-a6b2-967f6847ade6","Type":"ContainerStarted","Data":"ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025"} Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.468284 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-5kxfk" event={"ID":"44f53bdc-e742-4661-a6b2-967f6847ade6","Type":"ContainerStarted","Data":"24bdb41313d2d6dd52d01b6090f753999d83bd946974d0f2fe75cf6b1966a505"} Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.470093 4852 generic.go:334] "Generic (PLEG): container finished" podID="0a2eec37-e5b6-45bc-9e83-33be653a5dd7" containerID="9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3" exitCode=0 Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.470138 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" 
event={"ID":"0a2eec37-e5b6-45bc-9e83-33be653a5dd7","Type":"ContainerDied","Data":"9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3"} Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.472469 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"} Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.472542 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b"} Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.472557 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed"} Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.482836 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.516386 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.559911 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.602191 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc 
kubenswrapper[4852]: I1201 20:05:01.633672 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.677185 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.720857 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.733580 4852 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.735562 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.735589 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.735597 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.735691 4852 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.757485 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.807581 4852 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.807868 4852 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.809009 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.809043 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 
20:05:01.809052 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.809066 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.809075 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:01Z","lastTransitionTime":"2025-12-01T20:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.824136 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.827244 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.827284 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.827293 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.827308 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.827317 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:01Z","lastTransitionTime":"2025-12-01T20:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.835161 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc 
kubenswrapper[4852]: E1201 20:05:01.838301 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider 
started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[…],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.842593 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.842627 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:01
crc kubenswrapper[4852]: I1201 20:05:01.842640 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.842657 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.842669 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:01Z","lastTransitionTime":"2025-12-01T20:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.857575 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[…],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.861426 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.861475 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.861485 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.861503 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.861514 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:01Z","lastTransitionTime":"2025-12-01T20:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.871655 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.871922 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[…],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate
has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.875003 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.875036 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.875045 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.875062 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.875074 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:01Z","lastTransitionTime":"2025-12-01T20:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.884927 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: E1201 20:05:01.885091 4852 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.887050 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.887080 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.887090 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.887104 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.887114 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:01Z","lastTransitionTime":"2025-12-01T20:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.914617 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.961562 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z 
is after 2025-08-24T17:21:41Z" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.989789 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.989839 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.989852 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.989871 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.989883 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:01Z","lastTransitionTime":"2025-12-01T20:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:01 crc kubenswrapper[4852]: I1201 20:05:01.995275 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:01Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.034950 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.076279 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.091898 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.091932 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.091941 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.091956 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.091966 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:02Z","lastTransitionTime":"2025-12-01T20:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.114108 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.157890 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.194518 4852 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.194560 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.194573 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.194590 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.194602 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:02Z","lastTransitionTime":"2025-12-01T20:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.197027 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.232774 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.277428 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 
20:05:02.297300 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.297343 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.297355 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.297373 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.297388 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:02Z","lastTransitionTime":"2025-12-01T20:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.319213 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:02 crc kubenswrapper[4852]: E1201 20:05:02.319403 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.319550 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.353675 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.400988 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.401027 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.401041 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.401059 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.401075 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:02Z","lastTransitionTime":"2025-12-01T20:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.401248 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"moun
tPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.440013 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z 
is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.478800 4852 generic.go:334] "Generic (PLEG): container finished" podID="0a2eec37-e5b6-45bc-9e83-33be653a5dd7" containerID="65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a" exitCode=0 Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.478978 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" event={"ID":"0a2eec37-e5b6-45bc-9e83-33be653a5dd7","Type":"ContainerDied","Data":"65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.487211 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18f
ac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.487804 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.487868 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.487889 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.490685 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.503684 4852 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.503807 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.504404 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.504592 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.504691 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:02Z","lastTransitionTime":"2025-12-01T20:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.523414 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.559686 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.602117 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: 
I1201 20:05:02.607359 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.607399 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.607412 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.607430 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.607442 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:02Z","lastTransitionTime":"2025-12-01T20:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.636218 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.678021 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.709909 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.709945 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.709954 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.709974 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.709986 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:02Z","lastTransitionTime":"2025-12-01T20:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.721128 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.754513 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\
\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.796060 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}
}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.811506 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.811535 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.811545 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.811561 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.811572 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:02Z","lastTransitionTime":"2025-12-01T20:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.837114 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.876416 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.914022 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.914076 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.914089 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 
20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.914108 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.914121 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:02Z","lastTransitionTime":"2025-12-01T20:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.919144 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-cont
roller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.963272 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.969826 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.969870 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:02 crc kubenswrapper[4852]: E1201 20:05:02.969985 4852 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:05:02 crc kubenswrapper[4852]: E1201 20:05:02.970000 4852 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:05:02 crc kubenswrapper[4852]: E1201 20:05:02.970043 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:06.970027503 +0000 UTC m=+26.897108930 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:05:02 crc kubenswrapper[4852]: E1201 20:05:02.970077 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:06.970056994 +0000 UTC m=+26.897138451 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:05:02 crc kubenswrapper[4852]: I1201 20:05:02.993821 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:02Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.017272 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.017312 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.017320 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.017334 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.017344 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:03Z","lastTransitionTime":"2025-12-01T20:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.037023 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.070669 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.070759 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.070793 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.070828 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:05:07.070811035 +0000 UTC m=+26.997892452 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.070903 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.070928 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.070903 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.070940 4852 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.070946 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.070957 4852 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.070976 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:07.07096929 +0000 UTC m=+26.998050707 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.070994 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:07.070989161 +0000 UTC m=+26.998070578 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.080182 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z 
is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.116649 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.119242 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.119289 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.119307 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.119327 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.119343 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:03Z","lastTransitionTime":"2025-12-01T20:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.160714 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.200828 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.222585 4852 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.222675 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.222703 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.222733 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.222761 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:03Z","lastTransitionTime":"2025-12-01T20:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.244289 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.277489 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.315833 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.319903 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.319901 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.320030 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.320066 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.320145 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.320251 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.324578 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.324619 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.324632 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.324649 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.324659 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:03Z","lastTransitionTime":"2025-12-01T20:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.356998 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.1
1\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.373638 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.373825 4852 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:05:03 crc kubenswrapper[4852]: E1201 20:05:03.373914 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs podName:7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a nodeName:}" failed. No retries permitted until 2025-12-01 20:05:07.373888773 +0000 UTC m=+27.300970230 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs") pod "network-metrics-daemon-j2q4c" (UID: "7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.426820 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.426872 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.426889 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.426911 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.426928 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:03Z","lastTransitionTime":"2025-12-01T20:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.496997 4852 generic.go:334] "Generic (PLEG): container finished" podID="0a2eec37-e5b6-45bc-9e83-33be653a5dd7" containerID="6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d" exitCode=0 Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.497096 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" event={"ID":"0a2eec37-e5b6-45bc-9e83-33be653a5dd7","Type":"ContainerDied","Data":"6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.521953 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.529394 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.529474 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.529491 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.529511 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.529527 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:03Z","lastTransitionTime":"2025-12-01T20:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.542878 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.562559 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.582066 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.603970 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.620395 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.632022 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.632064 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.632077 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.632095 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.632108 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:03Z","lastTransitionTime":"2025-12-01T20:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.637839 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.675978 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.716148 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.735133 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.735182 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.735199 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.735218 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.735231 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:03Z","lastTransitionTime":"2025-12-01T20:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.754181 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.798390 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.835308 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.1
68.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.837345 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.837392 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.837409 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.837429 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.837444 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:03Z","lastTransitionTime":"2025-12-01T20:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.878088 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.916344 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.939669 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.939704 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.939713 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.939726 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.939736 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:03Z","lastTransitionTime":"2025-12-01T20:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:03 crc kubenswrapper[4852]: I1201 20:05:03.954515 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.041667 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.041707 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.041718 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.041730 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.041739 4852 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:04Z","lastTransitionTime":"2025-12-01T20:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.144565 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.144627 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.144643 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.144663 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.144677 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:04Z","lastTransitionTime":"2025-12-01T20:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.247074 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.247143 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.247164 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.247232 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.247251 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:04Z","lastTransitionTime":"2025-12-01T20:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.319583 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:04 crc kubenswrapper[4852]: E1201 20:05:04.319725 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.349818 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.349885 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.349903 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.349932 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.349951 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:04Z","lastTransitionTime":"2025-12-01T20:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.454186 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.454289 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.454318 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.454353 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.454391 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:04Z","lastTransitionTime":"2025-12-01T20:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.502612 4852 generic.go:334] "Generic (PLEG): container finished" podID="0a2eec37-e5b6-45bc-9e83-33be653a5dd7" containerID="33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4" exitCode=0 Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.502696 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" event={"ID":"0a2eec37-e5b6-45bc-9e83-33be653a5dd7","Type":"ContainerDied","Data":"33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4"} Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.509113 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"} Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.524440 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.542866 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.557751 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.557789 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.557798 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.557813 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.557821 4852 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:04Z","lastTransitionTime":"2025-12-01T20:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.558615 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.575228 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.596184 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z 
is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.610026 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.623338 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.638221 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.656109 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.659852 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.659884 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.659894 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.659910 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.659921 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:04Z","lastTransitionTime":"2025-12-01T20:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.670825 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.683320 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.693920 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.704960 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.716410 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.727842 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:04Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.762851 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.762975 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.763002 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.763033 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.763060 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:04Z","lastTransitionTime":"2025-12-01T20:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.865665 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.865716 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.865729 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.865749 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.865767 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:04Z","lastTransitionTime":"2025-12-01T20:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.968533 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.968611 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.968631 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.968660 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:04 crc kubenswrapper[4852]: I1201 20:05:04.968681 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:04Z","lastTransitionTime":"2025-12-01T20:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.071524 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.071602 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.071623 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.071648 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.071671 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:05Z","lastTransitionTime":"2025-12-01T20:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.174719 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.174833 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.174852 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.174886 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.174906 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:05Z","lastTransitionTime":"2025-12-01T20:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.278037 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.278105 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.278122 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.278147 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.278164 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:05Z","lastTransitionTime":"2025-12-01T20:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.319076 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.319167 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:05 crc kubenswrapper[4852]: E1201 20:05:05.319201 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.319083 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:05:05 crc kubenswrapper[4852]: E1201 20:05:05.319319 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:05:05 crc kubenswrapper[4852]: E1201 20:05:05.319410 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.381993 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.382624 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.382712 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.382736 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.382750 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:05Z","lastTransitionTime":"2025-12-01T20:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.485822 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.485887 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.485907 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.485930 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.485951 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:05Z","lastTransitionTime":"2025-12-01T20:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.516000 4852 generic.go:334] "Generic (PLEG): container finished" podID="0a2eec37-e5b6-45bc-9e83-33be653a5dd7" containerID="0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736" exitCode=0
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.516076 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" event={"ID":"0a2eec37-e5b6-45bc-9e83-33be653a5dd7","Type":"ContainerDied","Data":"0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736"}
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.537560 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.555134 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.571981 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.586436 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.588960 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.589028 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.589047 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.589071 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.589088 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:05Z","lastTransitionTime":"2025-12-01T20:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.599563 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.620019 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\"
:\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.630117 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.643778 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.662906 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.678919 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.691937 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.691989 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.692001 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.692017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.692030 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:05Z","lastTransitionTime":"2025-12-01T20:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.692966 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.707268 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.719388 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.741565 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.752989 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:05Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.793651 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.793702 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.793714 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.793733 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.793745 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:05Z","lastTransitionTime":"2025-12-01T20:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.897664 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.897742 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.897767 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.897797 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:05 crc kubenswrapper[4852]: I1201 20:05:05.897820 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:05Z","lastTransitionTime":"2025-12-01T20:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.000298 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.000381 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.000408 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.000439 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.000493 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:06Z","lastTransitionTime":"2025-12-01T20:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.104407 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.104510 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.104532 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.104562 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.104582 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:06Z","lastTransitionTime":"2025-12-01T20:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.207116 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.207181 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.207200 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.207226 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.207245 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:06Z","lastTransitionTime":"2025-12-01T20:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.309487 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.309518 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.309526 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.309540 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.309552 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:06Z","lastTransitionTime":"2025-12-01T20:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.319865 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:06 crc kubenswrapper[4852]: E1201 20:05:06.320016 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.411668 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.411700 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.411709 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.411722 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.411731 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:06Z","lastTransitionTime":"2025-12-01T20:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.514303 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.514365 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.514386 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.514416 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.514434 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:06Z","lastTransitionTime":"2025-12-01T20:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.526666 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.527026 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.527868 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.536893 4852 generic.go:334] "Generic (PLEG): container finished" podID="0a2eec37-e5b6-45bc-9e83-33be653a5dd7" containerID="5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1" exitCode=0 Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.536975 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" event={"ID":"0a2eec37-e5b6-45bc-9e83-33be653a5dd7","Type":"ContainerDied","Data":"5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.557998 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.568448 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.569129 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.576970 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.592339 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.615802 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.618406 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.618441 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.618475 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.618498 4852 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.618515 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:06Z","lastTransitionTime":"2025-12-01T20:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.633646 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",
\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.654522 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.669006 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.685183 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.704685 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.722160 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.722222 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.722238 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.722262 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.722279 4852 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:06Z","lastTransitionTime":"2025-12-01T20:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.724475 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.736081 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.748422 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.766130 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.783338 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.797099 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.807789 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.821162 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.824643 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.824678 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.824687 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.824703 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.824711 4852 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:06Z","lastTransitionTime":"2025-12-01T20:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.833665 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.844883 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.861878 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-o
penvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.876049 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.887685 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.899084 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.914499 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.926678 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.926729 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.926742 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.926762 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.926774 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:06Z","lastTransitionTime":"2025-12-01T20:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.929178 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.952915 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073
1a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.965884 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.977879 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:06 crc kubenswrapper[4852]: I1201 20:05:06.991775 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:06Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.002801 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.013611 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.013657 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.013752 4852 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.013801 4852 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.014075 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:15.013834545 +0000 UTC m=+34.940916002 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.014098 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:15.014089473 +0000 UTC m=+34.941170890 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.029184 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.029225 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.029234 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.029248 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.029257 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:07Z","lastTransitionTime":"2025-12-01T20:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.115164 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.115280 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.115346 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.115580 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.115606 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.115624 4852 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.115691 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:15.115668091 +0000 UTC m=+35.042749548 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.115842 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:05:15.115831316 +0000 UTC m=+35.042912733 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.116041 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.116110 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.116162 4852 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.116271 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:15.11624334 +0000 UTC m=+35.043324797 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.132185 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.132225 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.132237 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.132256 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.132274 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:07Z","lastTransitionTime":"2025-12-01T20:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.234107 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.234141 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.234153 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.234169 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.234179 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:07Z","lastTransitionTime":"2025-12-01T20:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.319392 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.319438 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.319448 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.319651 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.319846 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.320022 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.337138 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.337199 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.337216 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.337237 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.337255 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:07Z","lastTransitionTime":"2025-12-01T20:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.420023 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.420344 4852 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:05:07 crc kubenswrapper[4852]: E1201 20:05:07.420515 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs podName:7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a nodeName:}" failed. No retries permitted until 2025-12-01 20:05:15.420442244 +0000 UTC m=+35.347523701 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs") pod "network-metrics-daemon-j2q4c" (UID: "7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.440585 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.440661 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.440678 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.440702 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.440721 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:07Z","lastTransitionTime":"2025-12-01T20:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.542878 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.542934 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.542953 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.542976 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.542993 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:07Z","lastTransitionTime":"2025-12-01T20:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.545603 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" event={"ID":"0a2eec37-e5b6-45bc-9e83-33be653a5dd7","Type":"ContainerStarted","Data":"59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6"} Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.545689 4852 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.560229 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.579875 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.598868 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.620743 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.637555 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.645515 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.645561 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.645578 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.645638 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.645658 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:07Z","lastTransitionTime":"2025-12-01T20:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.657753 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"moun
tPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.684929 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://271512e52f48c4492ee7b722fbcaf09d988cb3d2
1c76527a6a22bd6b06baf583\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.699083 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.711595 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7
f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.727425 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.741098 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.747618 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.747658 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.747671 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.747688 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.747701 4852 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:07Z","lastTransitionTime":"2025-12-01T20:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.752783 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.766877 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.786785 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.797571 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:07Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 
20:05:07.849784 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.849823 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.849832 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.849847 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.849857 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:07Z","lastTransitionTime":"2025-12-01T20:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.951791 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.951828 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.951839 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.951861 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:07 crc kubenswrapper[4852]: I1201 20:05:07.951901 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:07Z","lastTransitionTime":"2025-12-01T20:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.053976 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.054017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.054027 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.054042 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.054050 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:08Z","lastTransitionTime":"2025-12-01T20:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.156733 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.156781 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.156800 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.156824 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.156842 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:08Z","lastTransitionTime":"2025-12-01T20:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.259040 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.259080 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.259092 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.259108 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.259121 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:08Z","lastTransitionTime":"2025-12-01T20:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.322002 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:08 crc kubenswrapper[4852]: E1201 20:05:08.322139 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.362005 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.362069 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.362088 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.362112 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.362133 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:08Z","lastTransitionTime":"2025-12-01T20:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.466903 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.466981 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.467005 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.467038 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.467062 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:08Z","lastTransitionTime":"2025-12-01T20:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.549922 4852 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.570033 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.570087 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.570104 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.570129 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.570148 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:08Z","lastTransitionTime":"2025-12-01T20:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.672999 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.673081 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.673144 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.673172 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.673190 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:08Z","lastTransitionTime":"2025-12-01T20:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.776899 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.776974 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.777000 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.777029 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.777051 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:08Z","lastTransitionTime":"2025-12-01T20:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.879849 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.879973 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.879999 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.880031 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.880050 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:08Z","lastTransitionTime":"2025-12-01T20:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.982583 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.982645 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.982666 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.982693 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:08 crc kubenswrapper[4852]: I1201 20:05:08.982718 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:08Z","lastTransitionTime":"2025-12-01T20:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.085762 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.085822 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.085840 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.085864 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.085881 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:09Z","lastTransitionTime":"2025-12-01T20:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.189290 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.189692 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.189864 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.190027 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.190176 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:09Z","lastTransitionTime":"2025-12-01T20:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.293118 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.293174 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.293190 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.293214 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.293233 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:09Z","lastTransitionTime":"2025-12-01T20:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.320023 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.320106 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.320529 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:09 crc kubenswrapper[4852]: E1201 20:05:09.320902 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:09 crc kubenswrapper[4852]: E1201 20:05:09.321242 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:09 crc kubenswrapper[4852]: E1201 20:05:09.321372 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.396686 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.396724 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.396735 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.396753 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.396766 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:09Z","lastTransitionTime":"2025-12-01T20:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.499598 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.499647 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.499659 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.499678 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.499693 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:09Z","lastTransitionTime":"2025-12-01T20:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.556239 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/0.log" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.560852 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583" exitCode=1 Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.560910 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583"} Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.562254 4852 scope.go:117] "RemoveContainer" containerID="271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.586440 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.603199 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.603253 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.603301 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.603325 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.603344 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:09Z","lastTransitionTime":"2025-12-01T20:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.610570 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.628533 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.648831 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.662536 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.678164 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.696181 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.707693 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.707739 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.707767 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.707792 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.707807 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:09Z","lastTransitionTime":"2025-12-01T20:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.712161 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.724109 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.734770 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.746376 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.763308 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31605
51d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:08Z\\\",\\\"message\\\":\\\"1] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.896092 6101 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.896231 6101 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 20:05:08.896386 6101 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896407 6101 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896569 6101 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896669 6101 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.897024 6101 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.777756 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.791207 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.802744 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:09Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.811939 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.811973 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.811983 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.811998 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.812008 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:09Z","lastTransitionTime":"2025-12-01T20:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.915227 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.915278 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.915293 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.915315 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:09 crc kubenswrapper[4852]: I1201 20:05:09.915331 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:09Z","lastTransitionTime":"2025-12-01T20:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.018667 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.018710 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.018721 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.018739 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.018753 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:10Z","lastTransitionTime":"2025-12-01T20:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.122023 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.122500 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.122511 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.122525 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.122539 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:10Z","lastTransitionTime":"2025-12-01T20:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.225049 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.225091 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.225099 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.225114 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.225125 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:10Z","lastTransitionTime":"2025-12-01T20:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.319624 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:10 crc kubenswrapper[4852]: E1201 20:05:10.319804 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.327292 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.327348 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.327368 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.327391 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.327410 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:10Z","lastTransitionTime":"2025-12-01T20:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.337966 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.351031 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.366350 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.385907 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.398301 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.425653 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.430021 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.430085 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.430106 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.430131 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.430149 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:10Z","lastTransitionTime":"2025-12-01T20:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.444382 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.460090 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.474637 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.487695 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.503716 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.518974 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.528746 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.532363 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.532405 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.532416 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.532432 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.532476 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:10Z","lastTransitionTime":"2025-12-01T20:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.545725 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"moun
tPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.565431 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/0.log" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.568002 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.568103 4852 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.576548 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://271512e52f48c4492ee7b722fbcaf09d988cb3d2
1c76527a6a22bd6b06baf583\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:08Z\\\",\\\"message\\\":\\\"1] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.896092 6101 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.896231 6101 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 20:05:08.896386 6101 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896407 6101 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896569 6101 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896669 6101 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.897024 6101 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.596019 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.612412 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.626694 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.635108 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.635212 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.635233 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.635828 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.636082 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:10Z","lastTransitionTime":"2025-12-01T20:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.640572 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.653655 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.663408 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.676732 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.688375 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.699952 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.713958 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.739184 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.739237 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.739253 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.739274 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.739294 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:10Z","lastTransitionTime":"2025-12-01T20:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.741077 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8
dc93e07765f7d32a088e5ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:08Z\\\",\\\"message\\\":\\\"1] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.896092 6101 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.896231 6101 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 20:05:08.896386 6101 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896407 6101 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896569 6101 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896669 6101 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.897024 6101 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.757667 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.775389 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.788646 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.803291 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:10 crc kubenswrapper[4852]: 
I1201 20:05:10.843005 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.843070 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.843087 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.843111 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.843129 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:10Z","lastTransitionTime":"2025-12-01T20:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.945523 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.945587 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.945605 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.945629 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:10 crc kubenswrapper[4852]: I1201 20:05:10.945647 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:10Z","lastTransitionTime":"2025-12-01T20:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.048473 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.048550 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.048570 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.048596 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.048623 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:11Z","lastTransitionTime":"2025-12-01T20:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.151181 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.151233 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.151250 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.151276 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.151293 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:11Z","lastTransitionTime":"2025-12-01T20:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.254204 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.254273 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.254293 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.254317 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.254337 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:11Z","lastTransitionTime":"2025-12-01T20:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.319485 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.319500 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:11 crc kubenswrapper[4852]: E1201 20:05:11.319611 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.319508 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:11 crc kubenswrapper[4852]: E1201 20:05:11.319718 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:11 crc kubenswrapper[4852]: E1201 20:05:11.319993 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.357536 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.357613 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.357632 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.357658 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.357676 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:11Z","lastTransitionTime":"2025-12-01T20:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.460644 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.460692 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.460704 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.460721 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.460734 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:11Z","lastTransitionTime":"2025-12-01T20:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.563373 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.563414 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.563422 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.563437 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.563446 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:11Z","lastTransitionTime":"2025-12-01T20:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.572687 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/1.log" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.573497 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/0.log" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.576658 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5" exitCode=1 Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.576705 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.576763 4852 scope.go:117] "RemoveContainer" containerID="271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.577839 4852 scope.go:117] "RemoveContainer" containerID="d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5" Dec 01 20:05:11 crc kubenswrapper[4852]: E1201 20:05:11.578105 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.594962 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.606276 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.619972 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.633091 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.650816 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.666125 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.666197 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.666218 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.666248 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.666266 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:11Z","lastTransitionTime":"2025-12-01T20:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.668195 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.682970 4852 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.697059 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.708926 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.721230 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.739892 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.756213 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.766637 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.769577 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.769651 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.769676 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.769711 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.769734 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:11Z","lastTransitionTime":"2025-12-01T20:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.786439 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"moun
tPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.817566 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8
dc93e07765f7d32a088e5ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:08Z\\\",\\\"message\\\":\\\"1] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.896092 6101 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.896231 6101 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 20:05:08.896386 6101 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896407 6101 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896569 6101 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896669 6101 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.897024 6101 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:11Z\\\",\\\"message\\\":\\\"Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1201 20:05:10.422694 6241 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422785 6241 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422868 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.423731 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 20:05:10.423763 6241 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 20:05:10.423774 6241 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI1201 20:05:10.423810 6241 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 4.357211ms\\\\nF1201 20:05:10.423837 6241 ovnkube.go:137] failed 
to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:11Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.872954 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.873039 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.873068 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.873101 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.873122 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:11Z","lastTransitionTime":"2025-12-01T20:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.975646 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.975713 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.975736 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.975764 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:11 crc kubenswrapper[4852]: I1201 20:05:11.975782 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:11Z","lastTransitionTime":"2025-12-01T20:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.078544 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.078594 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.078607 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.078626 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.078639 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.163699 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.163804 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.163832 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.163869 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.163895 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: E1201 20:05:12.189246 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:12Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.195093 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.195164 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.195181 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.195207 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.195224 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: E1201 20:05:12.215158 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:12Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.220675 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.220755 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.220773 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.220803 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.220823 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: E1201 20:05:12.241337 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:12Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.246280 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.246353 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.246374 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.246401 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.246421 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: E1201 20:05:12.265777 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:12Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.271129 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.271200 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.271226 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.271259 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.271284 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: E1201 20:05:12.291595 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:12Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:12 crc kubenswrapper[4852]: E1201 20:05:12.291822 4852 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.294205 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.294259 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.294278 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.294302 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.294319 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.319710 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:12 crc kubenswrapper[4852]: E1201 20:05:12.319928 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.397878 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.397943 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.397966 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.397996 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.398018 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.501005 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.501106 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.501127 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.501150 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.501186 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.583876 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/1.log" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.604061 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.604179 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.604232 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.604259 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.604283 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.707752 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.707824 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.707848 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.707881 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.707908 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.810384 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.810501 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.810529 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.810561 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.810580 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.913950 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.914039 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.914055 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.914073 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:12 crc kubenswrapper[4852]: I1201 20:05:12.914087 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:12Z","lastTransitionTime":"2025-12-01T20:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.017278 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.017355 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.017377 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.017407 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.017424 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:13Z","lastTransitionTime":"2025-12-01T20:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.120739 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.120804 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.120822 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.120846 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.120863 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:13Z","lastTransitionTime":"2025-12-01T20:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.181323 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb"] Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.188057 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.191341 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.192573 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.211376 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controlle
r-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.223755 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.223827 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.223852 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.223888 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.223915 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:13Z","lastTransitionTime":"2025-12-01T20:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.234126 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.253103 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.274890 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.284283 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b04aef5d-1916-452d-8706-885e8e52f9ef-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.284424 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5m27\" (UniqueName: \"kubernetes.io/projected/b04aef5d-1916-452d-8706-885e8e52f9ef-kube-api-access-x5m27\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.284528 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b04aef5d-1916-452d-8706-885e8e52f9ef-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.284630 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b04aef5d-1916-452d-8706-885e8e52f9ef-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.309547 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8
dc93e07765f7d32a088e5ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://271512e52f48c4492ee7b722fbcaf09d988cb3d21c76527a6a22bd6b06baf583\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:08Z\\\",\\\"message\\\":\\\"1] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.896092 6101 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.896231 6101 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 20:05:08.896386 6101 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896407 6101 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896569 6101 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:08.896669 6101 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 20:05:08.897024 6101 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:11Z\\\",\\\"message\\\":\\\"Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1201 20:05:10.422694 6241 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422785 6241 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422868 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.423731 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 20:05:10.423763 6241 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 20:05:10.423774 6241 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI1201 20:05:10.423810 6241 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 4.357211ms\\\\nF1201 20:05:10.423837 6241 ovnkube.go:137] failed 
to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.319755 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.319876 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.319788 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:13 crc kubenswrapper[4852]: E1201 20:05:13.320047 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:13 crc kubenswrapper[4852]: E1201 20:05:13.320208 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:13 crc kubenswrapper[4852]: E1201 20:05:13.320327 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.327187 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.327238 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.327257 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.327296 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.327315 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:13Z","lastTransitionTime":"2025-12-01T20:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.330015 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.347599 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.363546 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.364659 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.365705 4852 scope.go:117] "RemoveContainer" containerID="d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5" Dec 01 20:05:13 crc kubenswrapper[4852]: E1201 20:05:13.365894 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.375845 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.385855 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b04aef5d-1916-452d-8706-885e8e52f9ef-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.385968 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b04aef5d-1916-452d-8706-885e8e52f9ef-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.386008 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b04aef5d-1916-452d-8706-885e8e52f9ef-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.386083 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5m27\" (UniqueName: \"kubernetes.io/projected/b04aef5d-1916-452d-8706-885e8e52f9ef-kube-api-access-x5m27\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.387000 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b04aef5d-1916-452d-8706-885e8e52f9ef-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.387642 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b04aef5d-1916-452d-8706-885e8e52f9ef-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 
20:05:13.390367 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.392946 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b04aef5d-1916-452d-8706-885e8e52f9ef-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.401330 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5m27\" (UniqueName: \"kubernetes.io/projected/b04aef5d-1916-452d-8706-885e8e52f9ef-kube-api-access-x5m27\") pod \"ovnkube-control-plane-749d76644c-2x6sb\" (UID: \"b04aef5d-1916-452d-8706-885e8e52f9ef\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.405651 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.417313 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.428579 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.430002 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.430071 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.430090 4852 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.430110 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.430124 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:13Z","lastTransitionTime":"2025-12-01T20:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.441840 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.454177 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.466394 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.477829 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.504826 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.508799 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.532835 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.532876 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.532893 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.532912 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.532926 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:13Z","lastTransitionTime":"2025-12-01T20:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:13 crc kubenswrapper[4852]: W1201 20:05:13.536147 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb04aef5d_1916_452d_8706_885e8e52f9ef.slice/crio-6bd463fa4f1fe4a62cf1b3a01d861cf2b5bbf943708e9296916cb5d204c6bd09 WatchSource:0}: Error finding container 6bd463fa4f1fe4a62cf1b3a01d861cf2b5bbf943708e9296916cb5d204c6bd09: Status 404 returned error can't find the container with id 6bd463fa4f1fe4a62cf1b3a01d861cf2b5bbf943708e9296916cb5d204c6bd09 Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.540397 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.567189 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.584296 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.596367 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" event={"ID":"b04aef5d-1916-452d-8706-885e8e52f9ef","Type":"ContainerStarted","Data":"6bd463fa4f1fe4a62cf1b3a01d861cf2b5bbf943708e9296916cb5d204c6bd09"} Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.599698 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.609434 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.620187 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.635298 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.635354 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.635368 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.635390 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.635404 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:13Z","lastTransitionTime":"2025-12-01T20:05:13Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.637150 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:11Z\\\",\\\"message\\\":\\\"Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1201 20:05:10.422694 6241 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422785 6241 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422868 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.423731 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 20:05:10.423763 6241 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 20:05:10.423774 6241 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI1201 20:05:10.423810 6241 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 4.357211ms\\\\nF1201 20:05:10.423837 6241 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler 
{0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.652131 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.667139 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.679177 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.693093 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.706980 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.721194 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.733212 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:13Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.738206 4852 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.738234 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.738244 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.738258 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.738270 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:13Z","lastTransitionTime":"2025-12-01T20:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.841311 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.841384 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.841408 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.841439 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.841513 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:13Z","lastTransitionTime":"2025-12-01T20:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.946044 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.946125 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.946151 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.946182 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:13 crc kubenswrapper[4852]: I1201 20:05:13.946206 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:13Z","lastTransitionTime":"2025-12-01T20:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.049231 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.049704 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.049724 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.049750 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.049769 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:14Z","lastTransitionTime":"2025-12-01T20:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.153647 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.153728 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.153752 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.153783 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.153807 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:14Z","lastTransitionTime":"2025-12-01T20:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.256618 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.256696 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.256721 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.256751 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.256776 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:14Z","lastTransitionTime":"2025-12-01T20:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.320044 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:14 crc kubenswrapper[4852]: E1201 20:05:14.320208 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.359114 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.359172 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.359195 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.359220 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.359238 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:14Z","lastTransitionTime":"2025-12-01T20:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.462282 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.462355 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.462377 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.462428 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.462479 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:14Z","lastTransitionTime":"2025-12-01T20:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.565629 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.565688 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.565711 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.565735 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.565752 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:14Z","lastTransitionTime":"2025-12-01T20:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.602591 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" event={"ID":"b04aef5d-1916-452d-8706-885e8e52f9ef","Type":"ContainerStarted","Data":"65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.602675 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" event={"ID":"b04aef5d-1916-452d-8706-885e8e52f9ef","Type":"ContainerStarted","Data":"43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.624056 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.646862 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.660893 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.668966 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.669033 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.669057 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.669085 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.669107 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:14Z","lastTransitionTime":"2025-12-01T20:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.682802 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"moun
tPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.720082 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8
dc93e07765f7d32a088e5ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:11Z\\\",\\\"message\\\":\\\"Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1201 20:05:10.422694 6241 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422785 6241 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422868 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.423731 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 20:05:10.423763 6241 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 20:05:10.423774 6241 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI1201 20:05:10.423810 6241 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 4.357211ms\\\\nF1201 20:05:10.423837 6241 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.739391 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.758204 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.772307 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.772372 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.772390 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.772415 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.772432 4852 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:14Z","lastTransitionTime":"2025-12-01T20:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.776812 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":
\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.796854 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.816532 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.836980 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.851875 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 
20:05:14.866622 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.875377 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.875419 4852 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.875431 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.875448 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.875484 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:14Z","lastTransitionTime":"2025-12-01T20:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.882725 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.895483 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.912203 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:14Z is after 2025-08-24T17:21:41Z" Dec 01 
20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.978860 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.978932 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.978952 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.978976 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:14 crc kubenswrapper[4852]: I1201 20:05:14.978994 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:14Z","lastTransitionTime":"2025-12-01T20:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.082251 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.082315 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.082336 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.082358 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.082375 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:15Z","lastTransitionTime":"2025-12-01T20:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.101843 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.101904 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.102016 4852 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.102047 4852 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.102160 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:31.102144271 +0000 UTC m=+51.029225698 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.102180 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:31.102172442 +0000 UTC m=+51.029253869 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.185571 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.185628 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.185646 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.185672 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.185690 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:15Z","lastTransitionTime":"2025-12-01T20:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.202796 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.202975 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.203008 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:05:31.202974485 +0000 UTC m=+51.130055932 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.203154 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.203186 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.203223 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.203246 4852 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.203349 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:31.203320426 +0000 UTC m=+51.130401903 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.203366 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.203391 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.203410 4852 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.203499 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 20:05:31.203482491 +0000 UTC m=+51.130563948 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.288659 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.288720 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.288739 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.288762 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.288814 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:15Z","lastTransitionTime":"2025-12-01T20:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.319171 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.319272 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.319372 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.319272 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.319489 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.319654 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.391418 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.391506 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.391521 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.391537 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.391549 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:15Z","lastTransitionTime":"2025-12-01T20:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.494696 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.494760 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.494778 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.494803 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.494822 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:15Z","lastTransitionTime":"2025-12-01T20:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.506330 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.506552 4852 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 01 20:05:15 crc kubenswrapper[4852]: E1201 20:05:15.506634 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs podName:7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a nodeName:}" failed. No retries permitted until 2025-12-01 20:05:31.506611452 +0000 UTC m=+51.433692899 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs") pod "network-metrics-daemon-j2q4c" (UID: "7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.598026 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.598110 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.598131 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.598161 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.598184 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:15Z","lastTransitionTime":"2025-12-01T20:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.701018 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.701075 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.701090 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.701107 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.701121 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:15Z","lastTransitionTime":"2025-12-01T20:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.803283 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.803359 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.803381 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.803409 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.803432 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:15Z","lastTransitionTime":"2025-12-01T20:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.906017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.906084 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.906109 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.906138 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:15 crc kubenswrapper[4852]: I1201 20:05:15.906169 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:15Z","lastTransitionTime":"2025-12-01T20:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.008956 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.009029 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.009052 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.009086 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.009109 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:16Z","lastTransitionTime":"2025-12-01T20:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.111752 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.111825 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.111849 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.111879 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.111905 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:16Z","lastTransitionTime":"2025-12-01T20:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.214357 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.214406 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.214423 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.214446 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.214495 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:16Z","lastTransitionTime":"2025-12-01T20:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.319233 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 20:05:16 crc kubenswrapper[4852]: E1201 20:05:16.320169 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.321261 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.321360 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.321379 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.321403 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.321423 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:16Z","lastTransitionTime":"2025-12-01T20:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.424799 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.424858 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.424874 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.424897 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.424914 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:16Z","lastTransitionTime":"2025-12-01T20:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.527786 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.527836 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.527855 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.527874 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.527890 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:16Z","lastTransitionTime":"2025-12-01T20:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.630565 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.630630 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.630649 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.630675 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.630694 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:16Z","lastTransitionTime":"2025-12-01T20:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.734036 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.734109 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.734132 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.734161 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.734181 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:16Z","lastTransitionTime":"2025-12-01T20:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.837054 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.837131 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.837151 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.837176 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.837196 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:16Z","lastTransitionTime":"2025-12-01T20:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.940608 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.940671 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.940692 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.940714 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:16 crc kubenswrapper[4852]: I1201 20:05:16.940732 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:16Z","lastTransitionTime":"2025-12-01T20:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.043682 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.043742 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.043760 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.043782 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.043800 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:17Z","lastTransitionTime":"2025-12-01T20:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.147027 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.147127 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.147148 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.147173 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.147191 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:17Z","lastTransitionTime":"2025-12-01T20:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.249601 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.249645 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.249662 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.249687 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.249705 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:17Z","lastTransitionTime":"2025-12-01T20:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.319692 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.319794 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.319813 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:17 crc kubenswrapper[4852]: E1201 20:05:17.319878 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:05:17 crc kubenswrapper[4852]: E1201 20:05:17.319979 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:05:17 crc kubenswrapper[4852]: E1201 20:05:17.320117 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.353379 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.353443 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.353487 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.353512 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.353530 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:17Z","lastTransitionTime":"2025-12-01T20:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.456334 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.456396 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.456412 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.456437 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.456488 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:17Z","lastTransitionTime":"2025-12-01T20:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.559340 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.559404 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.559422 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.559485 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.559505 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:17Z","lastTransitionTime":"2025-12-01T20:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.662588 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.662654 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.662667 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.662688 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.662701 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:17Z","lastTransitionTime":"2025-12-01T20:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.765876 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.765942 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.765962 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.765987 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.766004 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:17Z","lastTransitionTime":"2025-12-01T20:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.868804 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.868905 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.868924 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.868949 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.868966 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:17Z","lastTransitionTime":"2025-12-01T20:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.971002 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.971060 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.971080 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.971109 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:17 crc kubenswrapper[4852]: I1201 20:05:17.971130 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:17Z","lastTransitionTime":"2025-12-01T20:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.074652 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.074718 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.074741 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.074770 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.074792 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:18Z","lastTransitionTime":"2025-12-01T20:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.177564 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.177631 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.177651 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.177674 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.177691 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:18Z","lastTransitionTime":"2025-12-01T20:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.281234 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.281300 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.281317 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.281341 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.281359 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:18Z","lastTransitionTime":"2025-12-01T20:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.319743 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 20:05:18 crc kubenswrapper[4852]: E1201 20:05:18.319941 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.384124 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.384193 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.384218 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.384251 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.384275 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:18Z","lastTransitionTime":"2025-12-01T20:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.488155 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.488210 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.488226 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.488248 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.488266 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:18Z","lastTransitionTime":"2025-12-01T20:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.591553 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.591624 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.591644 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.591672 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.591690 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:18Z","lastTransitionTime":"2025-12-01T20:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.694892 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.694946 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.694964 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.694987 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.695005 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:18Z","lastTransitionTime":"2025-12-01T20:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.798694 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.798758 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.798777 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.798803 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.798824 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:18Z","lastTransitionTime":"2025-12-01T20:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.813478 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.835746 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:18Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.858027 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:18Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.877939 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:18Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.898593 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:18Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.901638 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.901686 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.901696 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.901717 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.901731 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:18Z","lastTransitionTime":"2025-12-01T20:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.921963 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:18Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.938989 4852 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:18Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.960726 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:18Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:18 crc kubenswrapper[4852]: I1201 20:05:18.981155 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:18Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.004974 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.005039 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.005064 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.005094 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.005119 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:19Z","lastTransitionTime":"2025-12-01T20:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.006086 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:19Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.025021 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:19Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.041824 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:19Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.061968 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:19Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.083696 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:19Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.108956 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.109010 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.109034 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.109068 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.109089 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:19Z","lastTransitionTime":"2025-12-01T20:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.118352 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8
dc93e07765f7d32a088e5ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:11Z\\\",\\\"message\\\":\\\"Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1201 20:05:10.422694 6241 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422785 6241 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422868 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.423731 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 20:05:10.423763 6241 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 20:05:10.423774 6241 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI1201 20:05:10.423810 6241 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 4.357211ms\\\\nF1201 20:05:10.423837 6241 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:19Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.142170 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:19Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.158728 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:19Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.212721 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.212798 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.212818 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.212847 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.212869 4852 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:19Z","lastTransitionTime":"2025-12-01T20:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.315516 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.315608 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.315631 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.315666 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.315687 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:19Z","lastTransitionTime":"2025-12-01T20:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.319905 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.320054 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:19 crc kubenswrapper[4852]: E1201 20:05:19.320114 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.320234 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:19 crc kubenswrapper[4852]: E1201 20:05:19.320310 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:19 crc kubenswrapper[4852]: E1201 20:05:19.320588 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.419764 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.419852 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.419878 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.419914 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.419940 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:19Z","lastTransitionTime":"2025-12-01T20:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.523277 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.523333 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.523344 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.523367 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.523382 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:19Z","lastTransitionTime":"2025-12-01T20:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.626334 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.626405 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.626429 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.626498 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.626529 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:19Z","lastTransitionTime":"2025-12-01T20:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.730528 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.730593 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.730617 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.730648 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.730673 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:19Z","lastTransitionTime":"2025-12-01T20:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.835450 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.835548 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.835572 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.835624 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.835648 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:19Z","lastTransitionTime":"2025-12-01T20:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.939624 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.939706 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.939725 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.939753 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:19 crc kubenswrapper[4852]: I1201 20:05:19.939771 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:19Z","lastTransitionTime":"2025-12-01T20:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.042953 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.043004 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.043019 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.043040 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.043053 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:20Z","lastTransitionTime":"2025-12-01T20:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.146812 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.146870 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.146891 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.146921 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.146943 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:20Z","lastTransitionTime":"2025-12-01T20:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.250034 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.250088 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.250105 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.250127 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.250144 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:20Z","lastTransitionTime":"2025-12-01T20:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.319867 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:20 crc kubenswrapper[4852]: E1201 20:05:20.320078 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.339245 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.352604 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.352669 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.352688 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.352713 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.352730 4852 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:20Z","lastTransitionTime":"2025-12-01T20:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.355524 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\
\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.373731 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.386638 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.401795 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.420923 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.442416 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.456068 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.456120 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.456131 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.456153 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.456165 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:20Z","lastTransitionTime":"2025-12-01T20:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.466921 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8
dc93e07765f7d32a088e5ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:11Z\\\",\\\"message\\\":\\\"Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1201 20:05:10.422694 6241 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422785 6241 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422868 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.423731 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 20:05:10.423763 6241 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 20:05:10.423774 6241 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI1201 20:05:10.423810 6241 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 4.357211ms\\\\nF1201 20:05:10.423837 6241 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.480437 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.490235 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.507715 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.519147 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.533215 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.546775 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.558917 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.558943 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.558975 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.558994 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.559004 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:20Z","lastTransitionTime":"2025-12-01T20:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.564291 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.578686 4852 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:20Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.661952 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.662332 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.662552 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.662696 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.662842 4852 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:20Z","lastTransitionTime":"2025-12-01T20:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.765767 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.765823 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.765843 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.765866 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.765884 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:20Z","lastTransitionTime":"2025-12-01T20:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.868246 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.868300 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.868317 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.868342 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.868362 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:20Z","lastTransitionTime":"2025-12-01T20:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.970660 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.970720 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.970738 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.970762 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:20 crc kubenswrapper[4852]: I1201 20:05:20.970779 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:20Z","lastTransitionTime":"2025-12-01T20:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.074114 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.074205 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.074226 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.074253 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.074348 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:21Z","lastTransitionTime":"2025-12-01T20:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.177330 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.177409 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.177427 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.177485 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.177505 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:21Z","lastTransitionTime":"2025-12-01T20:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.281327 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.281389 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.281403 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.281426 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.281441 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:21Z","lastTransitionTime":"2025-12-01T20:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.319950 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.320014 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.319956 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:21 crc kubenswrapper[4852]: E1201 20:05:21.320200 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:21 crc kubenswrapper[4852]: E1201 20:05:21.320323 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:21 crc kubenswrapper[4852]: E1201 20:05:21.320561 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.384154 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.384198 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.384211 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.384227 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.384242 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:21Z","lastTransitionTime":"2025-12-01T20:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.487532 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.487590 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.487609 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.487633 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.487649 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:21Z","lastTransitionTime":"2025-12-01T20:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.590123 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.590184 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.590208 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.590239 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.590263 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:21Z","lastTransitionTime":"2025-12-01T20:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.693812 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.693865 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.693882 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.693903 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.693922 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:21Z","lastTransitionTime":"2025-12-01T20:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.796105 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.796133 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.796142 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.796154 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.796164 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:21Z","lastTransitionTime":"2025-12-01T20:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.899347 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.899410 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.899426 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.899475 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:21 crc kubenswrapper[4852]: I1201 20:05:21.899493 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:21Z","lastTransitionTime":"2025-12-01T20:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.002033 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.002119 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.002140 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.002170 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.002190 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.105439 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.105526 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.105549 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.105577 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.105596 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.208796 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.208881 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.208917 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.208950 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.208974 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.312221 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.312341 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.312369 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.312399 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.312420 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.320058 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:22 crc kubenswrapper[4852]: E1201 20:05:22.320311 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.415491 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.415564 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.415585 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.415609 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.415626 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.451380 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.451445 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.451496 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.451522 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.451539 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: E1201 20:05:22.471274 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:22Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.477646 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.477754 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.477778 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.477809 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.477833 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: E1201 20:05:22.498949 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:22Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.508588 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.508664 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.508701 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.508733 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.508755 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: E1201 20:05:22.530995 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:22Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.536580 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.536648 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.536666 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.536693 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.536710 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: E1201 20:05:22.559224 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:22Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.563929 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.563986 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.564005 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.564031 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.564052 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: E1201 20:05:22.584680 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:22Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:22 crc kubenswrapper[4852]: E1201 20:05:22.584923 4852 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.587245 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.587329 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.587354 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.587385 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.587408 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.690602 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.690662 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.690680 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.690705 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.690727 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.794198 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.794255 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.794272 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.794297 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.794314 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.898724 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.898815 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.898835 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.898865 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:22 crc kubenswrapper[4852]: I1201 20:05:22.898885 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:22Z","lastTransitionTime":"2025-12-01T20:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.001896 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.001957 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.001973 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.002006 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.002025 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:23Z","lastTransitionTime":"2025-12-01T20:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.104844 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.104924 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.104944 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.104970 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.104990 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:23Z","lastTransitionTime":"2025-12-01T20:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.207697 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.207761 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.207777 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.207809 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.207827 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:23Z","lastTransitionTime":"2025-12-01T20:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.311303 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.311367 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.311384 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.311408 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.311425 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:23Z","lastTransitionTime":"2025-12-01T20:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.319592 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.319681 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.319592 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:23 crc kubenswrapper[4852]: E1201 20:05:23.319769 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:23 crc kubenswrapper[4852]: E1201 20:05:23.319865 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:23 crc kubenswrapper[4852]: E1201 20:05:23.319992 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.413364 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.413401 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.413409 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.413422 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.413431 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:23Z","lastTransitionTime":"2025-12-01T20:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.516324 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.516382 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.516403 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.516427 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.516449 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:23Z","lastTransitionTime":"2025-12-01T20:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.618923 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.618978 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.618994 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.619017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.619033 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:23Z","lastTransitionTime":"2025-12-01T20:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.721632 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.721690 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.721706 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.721729 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.721746 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:23Z","lastTransitionTime":"2025-12-01T20:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.824983 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.825056 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.825075 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.825105 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.825123 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:23Z","lastTransitionTime":"2025-12-01T20:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.928241 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.928300 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.928317 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.928345 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:23 crc kubenswrapper[4852]: I1201 20:05:23.928362 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:23Z","lastTransitionTime":"2025-12-01T20:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.031093 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.031146 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.031162 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.031188 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.031203 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:24Z","lastTransitionTime":"2025-12-01T20:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.133337 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.133381 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.133392 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.133409 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.133420 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:24Z","lastTransitionTime":"2025-12-01T20:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.235851 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.235888 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.235904 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.235919 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.235930 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:24Z","lastTransitionTime":"2025-12-01T20:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.319626 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:24 crc kubenswrapper[4852]: E1201 20:05:24.319878 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.338770 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.338852 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.338874 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.338918 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.338938 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:24Z","lastTransitionTime":"2025-12-01T20:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.442547 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.442609 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.442625 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.442650 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.442668 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:24Z","lastTransitionTime":"2025-12-01T20:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.545428 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.545543 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.545564 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.545586 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.545603 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:24Z","lastTransitionTime":"2025-12-01T20:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.648345 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.648405 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.648429 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.648487 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.648504 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:24Z","lastTransitionTime":"2025-12-01T20:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.751085 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.751144 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.751167 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.751196 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.751219 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:24Z","lastTransitionTime":"2025-12-01T20:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.854069 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.854115 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.854126 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.854141 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.854152 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:24Z","lastTransitionTime":"2025-12-01T20:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.957084 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.957165 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.957190 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.957220 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:24 crc kubenswrapper[4852]: I1201 20:05:24.957242 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:24Z","lastTransitionTime":"2025-12-01T20:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.061095 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.061194 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.061220 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.061256 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.061286 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:25Z","lastTransitionTime":"2025-12-01T20:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.164773 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.164866 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.164889 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.164922 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.164940 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:25Z","lastTransitionTime":"2025-12-01T20:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.268027 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.268087 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.268103 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.268132 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.268152 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:25Z","lastTransitionTime":"2025-12-01T20:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.319625 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.319653 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.319724 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:05:25 crc kubenswrapper[4852]: E1201 20:05:25.319855 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:05:25 crc kubenswrapper[4852]: E1201 20:05:25.319968 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:05:25 crc kubenswrapper[4852]: E1201 20:05:25.320110 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.321292 4852 scope.go:117] "RemoveContainer" containerID="d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.372736 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.373186 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.373205 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.373234 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.373255 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:25Z","lastTransitionTime":"2025-12-01T20:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.476604 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.476675 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.476696 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.476723 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.476741 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:25Z","lastTransitionTime":"2025-12-01T20:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.580358 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.580426 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.580445 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.580506 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.580533 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:25Z","lastTransitionTime":"2025-12-01T20:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.643146 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/1.log"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.647602 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65"}
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.648429 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-727gr"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.671512 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.683601 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.683669 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.683691 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.683719 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.683740 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:25Z","lastTransitionTime":"2025-12-01T20:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.693040 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.711026 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.730111 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.754247 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.783753 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.786813 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.786852 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.786861 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.786878 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.786888 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:25Z","lastTransitionTime":"2025-12-01T20:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.808541 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.835862 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.855650 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.876431 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.889266 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.889304 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.889316 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.889335 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.889347 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:25Z","lastTransitionTime":"2025-12-01T20:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.901255 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.925662 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.940675 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.952938 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.963323 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.979851 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31605
51d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:11Z\\\",\\\"message\\\":\\\"Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1201 20:05:10.422694 6241 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422785 6241 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422868 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.423731 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 20:05:10.423763 6241 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 20:05:10.423774 6241 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI1201 20:05:10.423810 6241 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 4.357211ms\\\\nF1201 20:05:10.423837 6241 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler 
{0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:25Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.991532 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.991578 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.991590 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.991606 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:25 crc kubenswrapper[4852]: I1201 20:05:25.991616 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:25Z","lastTransitionTime":"2025-12-01T20:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.095062 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.095096 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.095106 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.095121 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.095130 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:26Z","lastTransitionTime":"2025-12-01T20:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.198494 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.198540 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.198550 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.198565 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.198575 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:26Z","lastTransitionTime":"2025-12-01T20:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.301940 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.302007 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.302025 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.302052 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.302070 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:26Z","lastTransitionTime":"2025-12-01T20:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.319516 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:26 crc kubenswrapper[4852]: E1201 20:05:26.319823 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.405312 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.405402 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.405423 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.405484 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.405505 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:26Z","lastTransitionTime":"2025-12-01T20:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.510063 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.510136 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.510156 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.510184 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.510203 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:26Z","lastTransitionTime":"2025-12-01T20:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.613395 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.613443 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.613478 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.613504 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.613519 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:26Z","lastTransitionTime":"2025-12-01T20:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.654724 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/2.log" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.656029 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/1.log" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.660403 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65" exitCode=1 Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.660507 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.660589 4852 scope.go:117] "RemoveContainer" containerID="d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.661673 4852 scope.go:117] "RemoveContainer" containerID="77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65" Dec 01 20:05:26 crc kubenswrapper[4852]: E1201 20:05:26.661942 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.686917 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.707834 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.716979 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.717031 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.717052 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.717078 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.717100 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:26Z","lastTransitionTime":"2025-12-01T20:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.724801 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.743995 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 
20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.766990 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.786281 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc 
kubenswrapper[4852]: I1201 20:05:26.805775 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.820357 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.820422 4852 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.820441 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.820506 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.820532 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:26Z","lastTransitionTime":"2025-12-01T20:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.829689 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-va
r-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.865612 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e4387
56948a1fb0226218c057da65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5cc5c127c173006086ca0a431315604e0b2a5a8dc93e07765f7d32a088e5ab5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:11Z\\\",\\\"message\\\":\\\"Where:[where column _uuid == {39432221-5995-412b-967b-35e1a9405ec7}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1201 20:05:10.422694 6241 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422785 6241 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.422868 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 20:05:10.423731 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 20:05:10.423763 6241 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 20:05:10.423774 6241 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI1201 20:05:10.423810 6241 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 4.357211ms\\\\nF1201 20:05:10.423837 6241 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing 
service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\
"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.890354 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.915961 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.924400 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.924494 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.924515 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.924558 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.924614 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:26Z","lastTransitionTime":"2025-12-01T20:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.939692 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.958780 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.976620 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:26 crc kubenswrapper[4852]: I1201 20:05:26.997783 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:26Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.014646 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\
":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.028176 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.028261 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.028290 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.028314 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.028330 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:27Z","lastTransitionTime":"2025-12-01T20:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.131532 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.131612 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.131631 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.131661 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.131739 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:27Z","lastTransitionTime":"2025-12-01T20:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.235060 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.235136 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.235159 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.235190 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.235214 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:27Z","lastTransitionTime":"2025-12-01T20:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.318962 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.319076 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.319148 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:27 crc kubenswrapper[4852]: E1201 20:05:27.319166 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:27 crc kubenswrapper[4852]: E1201 20:05:27.319321 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:27 crc kubenswrapper[4852]: E1201 20:05:27.319488 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.337790 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.337863 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.337905 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.337941 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.337966 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:27Z","lastTransitionTime":"2025-12-01T20:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.441483 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.441540 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.441556 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.441580 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.441596 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:27Z","lastTransitionTime":"2025-12-01T20:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.544922 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.544988 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.545010 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.545044 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.545070 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:27Z","lastTransitionTime":"2025-12-01T20:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.648862 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.648919 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.648936 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.648963 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.648981 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:27Z","lastTransitionTime":"2025-12-01T20:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.666744 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/2.log" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.673812 4852 scope.go:117] "RemoveContainer" containerID="77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65" Dec 01 20:05:27 crc kubenswrapper[4852]: E1201 20:05:27.674114 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.691398 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identi
ty-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.711100 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.725850 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.742482 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 
20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.753239 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.753607 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.753647 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.753680 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.753702 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:27Z","lastTransitionTime":"2025-12-01T20:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.759881 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.778896 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.797281 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.819226 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.847552 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31605
51d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for 
network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.857059 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.857142 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.857165 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.857201 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.857222 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:27Z","lastTransitionTime":"2025-12-01T20:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.867715 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.888441 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.909403 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.928174 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.945294 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.960610 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.960652 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.960671 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.960697 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.960716 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:27Z","lastTransitionTime":"2025-12-01T20:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.972002 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:27 crc kubenswrapper[4852]: I1201 20:05:27.990766 4852 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:27Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.063913 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.064005 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.064026 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.064091 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.064192 4852 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:28Z","lastTransitionTime":"2025-12-01T20:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.168611 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.168711 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.168738 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.168774 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.168801 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:28Z","lastTransitionTime":"2025-12-01T20:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.272661 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.272748 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.272778 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.272814 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.272839 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:28Z","lastTransitionTime":"2025-12-01T20:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.319232 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:28 crc kubenswrapper[4852]: E1201 20:05:28.319481 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.376324 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.376390 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.376404 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.376420 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.376431 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:28Z","lastTransitionTime":"2025-12-01T20:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.479566 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.479627 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.479644 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.479667 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.479687 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:28Z","lastTransitionTime":"2025-12-01T20:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.584098 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.584177 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.584203 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.584236 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.584260 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:28Z","lastTransitionTime":"2025-12-01T20:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.686990 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.687043 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.687059 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.687079 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.687092 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:28Z","lastTransitionTime":"2025-12-01T20:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.790058 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.790125 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.790144 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.790173 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.790193 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:28Z","lastTransitionTime":"2025-12-01T20:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.893033 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.893111 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.893128 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.893156 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.893176 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:28Z","lastTransitionTime":"2025-12-01T20:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.995892 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.995974 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.995994 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.996024 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:28 crc kubenswrapper[4852]: I1201 20:05:28.996046 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:28Z","lastTransitionTime":"2025-12-01T20:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.099793 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.099856 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.099872 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.099896 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.099914 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:29Z","lastTransitionTime":"2025-12-01T20:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.203571 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.203648 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.203667 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.203695 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.203715 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:29Z","lastTransitionTime":"2025-12-01T20:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.311950 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.312047 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.312074 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.312121 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.312145 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:29Z","lastTransitionTime":"2025-12-01T20:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.319287 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.319386 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.319402 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:29 crc kubenswrapper[4852]: E1201 20:05:29.319607 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:29 crc kubenswrapper[4852]: E1201 20:05:29.319878 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:29 crc kubenswrapper[4852]: E1201 20:05:29.319997 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.415863 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.415929 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.415941 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.415980 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.415998 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:29Z","lastTransitionTime":"2025-12-01T20:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.520842 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.520901 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.520926 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.520956 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.520977 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:29Z","lastTransitionTime":"2025-12-01T20:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.624427 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.624494 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.624505 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.624522 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.624534 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:29Z","lastTransitionTime":"2025-12-01T20:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.728183 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.728278 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.728297 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.728360 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.728385 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:29Z","lastTransitionTime":"2025-12-01T20:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.831635 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.831705 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.831726 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.831760 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.831782 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:29Z","lastTransitionTime":"2025-12-01T20:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.936000 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.936054 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.936066 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.936085 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:29 crc kubenswrapper[4852]: I1201 20:05:29.936097 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:29Z","lastTransitionTime":"2025-12-01T20:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.039293 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.039398 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.039426 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.039502 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.039563 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:30Z","lastTransitionTime":"2025-12-01T20:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.143493 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.143562 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.143579 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.143608 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.143631 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:30Z","lastTransitionTime":"2025-12-01T20:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.247147 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.248011 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.248151 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.248312 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.248495 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:30Z","lastTransitionTime":"2025-12-01T20:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.319244 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 20:05:30 crc kubenswrapper[4852]: E1201 20:05:30.319430 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.347481 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.351887 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.351937 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.351950 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.351972 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.351986 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:30Z","lastTransitionTime":"2025-12-01T20:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.371901 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.390153 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.409199 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.436818 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.457123 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.457216 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.457246 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.457282 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.457309 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:30Z","lastTransitionTime":"2025-12-01T20:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.467941 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.494704 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.518263 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.536556 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.556145 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.561432 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.561578 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.561601 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.561628 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.561685 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:30Z","lastTransitionTime":"2025-12-01T20:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.577018 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.604850 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.625320 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.645998 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.661634 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.664609 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.664645 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.664655 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.664671 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.664681 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:30Z","lastTransitionTime":"2025-12-01T20:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.680004 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"moun
tPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:30Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.766983 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.767049 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.767068 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.767095 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.767121 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:30Z","lastTransitionTime":"2025-12-01T20:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.875880 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.875967 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.875989 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.876020 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.876040 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:30Z","lastTransitionTime":"2025-12-01T20:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.979495 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.979554 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.979567 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.979588 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:30 crc kubenswrapper[4852]: I1201 20:05:30.979601 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:30Z","lastTransitionTime":"2025-12-01T20:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.082855 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.082957 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.082978 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.083015 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.083035 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:31Z","lastTransitionTime":"2025-12-01T20:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.186981 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.187035 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.187047 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.187067 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.187080 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:31Z","lastTransitionTime":"2025-12-01T20:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.188617 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.188697 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.188929 4852 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.189112 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:06:03.189068547 +0000 UTC m=+83.116150134 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.188945 4852 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.189224 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:06:03.189194322 +0000 UTC m=+83.116275779 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.289984 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.290014 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.290058 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.290076 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.290100 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.290119 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:31Z","lastTransitionTime":"2025-12-01T20:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.290158 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.290179 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:03.290145792 +0000 UTC m=+83.217227249 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.290323 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.290335 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.290349 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.290431 4852 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.290492 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.290529 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.290600 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 20:06:03.290536165 +0000 UTC m=+83.217617722 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.290612 4852 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.290685 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 20:06:03.290659989 +0000 UTC m=+83.217741436 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.319226 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.319291 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.319315 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.319753 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.319745 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.319917 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.393388 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.393489 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.393508 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.393534 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.393552 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:31Z","lastTransitionTime":"2025-12-01T20:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.497445 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.497556 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.497578 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.497604 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.497625 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:31Z","lastTransitionTime":"2025-12-01T20:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.595067 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.595327 4852 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:05:31 crc kubenswrapper[4852]: E1201 20:05:31.595494 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs podName:7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a nodeName:}" failed. No retries permitted until 2025-12-01 20:06:03.595421373 +0000 UTC m=+83.522502830 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs") pod "network-metrics-daemon-j2q4c" (UID: "7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.600947 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.601000 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.601017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.601040 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.601058 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:31Z","lastTransitionTime":"2025-12-01T20:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.705366 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.705491 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.705520 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.705563 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.705591 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:31Z","lastTransitionTime":"2025-12-01T20:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.808844 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.808899 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.808913 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.808933 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.808946 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:31Z","lastTransitionTime":"2025-12-01T20:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.911850 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.911910 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.911928 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.911954 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:31 crc kubenswrapper[4852]: I1201 20:05:31.911976 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:31Z","lastTransitionTime":"2025-12-01T20:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.016022 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.016093 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.016111 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.016140 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.016327 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.120296 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.120333 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.120342 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.120356 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.120366 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.223777 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.223814 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.223826 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.223845 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.223858 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.319808 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:32 crc kubenswrapper[4852]: E1201 20:05:32.320011 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.325819 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.325887 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.325898 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.325915 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.325927 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.429437 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.429567 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.429595 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.429628 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.429649 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.532775 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.532831 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.532842 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.532860 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.532871 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.638325 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.638414 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.638449 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.638533 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.638563 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.740921 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.740977 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.740990 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.741008 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.741021 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.768065 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.768169 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.768206 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.768242 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.768262 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: E1201 20:05:32.792620 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.799344 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.799421 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.799489 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.799527 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.799550 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: E1201 20:05:32.818996 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.824305 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.824378 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
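The status-patch failure above dies inside the TLS handshake to the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743: the serving certificate expired at 2025-08-24T17:21:41Z, months before the current clock of 2025-12-01T20:05:32Z. Below is a minimal Go sketch for inspecting that endpoint's certificate window from the node, assuming the endpoint is reachable locally; InsecureSkipVerify is used only so the expired certificate can still be read.

// certcheck.go - a minimal sketch of the x509 validity check failing in the
// webhook calls above; not a hardened diagnostic tool.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Endpoint taken from the log line: Post "https://127.0.0.1:9743/node?timeout=10s".
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspection only: lets us read an expired cert
	})
	if err != nil {
		fmt.Println("dial error:", err)
		return
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Println("subject:  ", cert.Subject)
	fmt.Println("notBefore:", cert.NotBefore.Format(time.RFC3339))
	fmt.Println("notAfter: ", cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		// This is the condition the kubelet's webhook call is tripping over:
		// valid iff NotBefore <= now <= NotAfter, and now is past NotAfter.
		fmt.Println("certificate has expired")
	}
}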
event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.824404 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.824438 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.824505 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: E1201 20:05:32.843789 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.850422 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.852374 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
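The condition object embedded in each "Node became not ready" entry is plain JSON, so it can be lifted straight out of the log and decoded. A minimal Go sketch follows, using the condition text exactly as logged; the struct is a hand-written stand-in for the corresponding Kubernetes API type, not that type itself.

// condition.go - a minimal sketch that unmarshals the node condition JSON
// embedded in the "Node became not ready" log entries above.
package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition mirrors only the fields visible in the logged condition object.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Condition payload copied verbatim from the log above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		fmt.Println("unmarshal error:", err)
		return
	}
	fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
}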
event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.852483 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.852511 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.852546 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.852570 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.864937 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.871687 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.871687 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z"
Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.879281 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
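Each failed patch is logged with "will retry", and the next attempt follows within milliseconds carrying a byte-identical payload; in this burst the attempts run from 20:05:32.792620 through 20:05:32.895931. A minimal Go sketch of that bounded-retry pattern follows; the attempt count of five is chosen to match this burst and is an illustration, not a claim about the kubelet's configured limit.

// retry.go - a minimal sketch of the "will retry" pattern visible above.
package main

import (
	"errors"
	"fmt"
)

func patchNodeStatus() error {
	// Stand-in for the PATCH that is being rejected by the expired webhook cert.
	return errors.New("failed calling webhook: x509: certificate has expired")
}

func main() {
	const retries = 5 // illustrative; matches the number of attempts in this log burst
	for i := 1; i <= retries; i++ {
		if err := patchNodeStatus(); err != nil {
			fmt.Printf("attempt %d: error updating node status, will retry: %v\n", i, err)
			continue
		}
		return
	}
	fmt.Println("unable to update node status after", retries, "attempts")
}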
event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.879365 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.879395 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.879421 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: E1201 20:05:32.895931 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:32 crc kubenswrapper[4852]: E1201 20:05:32.896268 4852 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.898260 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.898294 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.898305 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.898322 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.898336 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:32Z","lastTransitionTime":"2025-12-01T20:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.899225 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt
/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.914522 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.933585 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.948002 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 
20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.962630 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.977387 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:32 crc kubenswrapper[4852]: I1201 20:05:32.990513 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:32Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.000902 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.000970 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.000989 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.001014 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.001034 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:33Z","lastTransitionTime":"2025-12-01T20:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.006937 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:33Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.027647 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:33Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.060897 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31605
51d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for 
network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:33Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.085760 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:33Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.103664 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.103723 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.103735 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.103754 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.103765 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:33Z","lastTransitionTime":"2025-12-01T20:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.108079 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:33Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.129431 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:33Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.144991 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:33Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.160938 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:33Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.207167 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.207619 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.207791 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.207909 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.208012 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:33Z","lastTransitionTime":"2025-12-01T20:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.311765 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.311858 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.311884 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.311919 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.311940 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:33Z","lastTransitionTime":"2025-12-01T20:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.319221 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.319333 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.319396 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:33 crc kubenswrapper[4852]: E1201 20:05:33.319516 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:33 crc kubenswrapper[4852]: E1201 20:05:33.319752 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:33 crc kubenswrapper[4852]: E1201 20:05:33.319994 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.414098 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.414164 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.414183 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.414242 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.414262 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:33Z","lastTransitionTime":"2025-12-01T20:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.518234 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.518316 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.518339 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.518371 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.518402 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:33Z","lastTransitionTime":"2025-12-01T20:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.620687 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.620771 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.620791 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.620816 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.620833 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:33Z","lastTransitionTime":"2025-12-01T20:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.723398 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.723437 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.723466 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.723485 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.723496 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:33Z","lastTransitionTime":"2025-12-01T20:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.825830 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.825895 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.825913 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.825937 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.825956 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:33Z","lastTransitionTime":"2025-12-01T20:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.928538 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.928617 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.928636 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.928662 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:33 crc kubenswrapper[4852]: I1201 20:05:33.928681 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:33Z","lastTransitionTime":"2025-12-01T20:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.031141 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.031187 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.031199 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.031217 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.031228 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:34Z","lastTransitionTime":"2025-12-01T20:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.134326 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.134379 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.134395 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.134413 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.134428 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:34Z","lastTransitionTime":"2025-12-01T20:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.236658 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.236707 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.236719 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.236735 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.236747 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:34Z","lastTransitionTime":"2025-12-01T20:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.320142 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:34 crc kubenswrapper[4852]: E1201 20:05:34.320413 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.339416 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.339517 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.339544 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.339573 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.339597 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:34Z","lastTransitionTime":"2025-12-01T20:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.442943 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.443002 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.443022 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.443047 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.443065 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:34Z","lastTransitionTime":"2025-12-01T20:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.545777 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.545818 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.545829 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.545846 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.545859 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:34Z","lastTransitionTime":"2025-12-01T20:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.648939 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.649051 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.649078 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.649108 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.649127 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:34Z","lastTransitionTime":"2025-12-01T20:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.751876 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.751928 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.751948 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.751973 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.751991 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:34Z","lastTransitionTime":"2025-12-01T20:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.854198 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.854267 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.854289 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.854319 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.854341 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:34Z","lastTransitionTime":"2025-12-01T20:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.956869 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.956911 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.956930 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.956946 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:34 crc kubenswrapper[4852]: I1201 20:05:34.956957 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:34Z","lastTransitionTime":"2025-12-01T20:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:35 crc kubenswrapper[4852]: I1201 20:05:35.059257 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:35 crc kubenswrapper[4852]: I1201 20:05:35.059316 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:35 crc kubenswrapper[4852]: I1201 20:05:35.059333 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:35 crc kubenswrapper[4852]: I1201 20:05:35.059357 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:35 crc kubenswrapper[4852]: I1201 20:05:35.059375 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:35Z","lastTransitionTime":"2025-12-01T20:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 20:05:35 crc kubenswrapper[4852]: I1201 20:05:35.319492 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:35 crc kubenswrapper[4852]: I1201 20:05:35.319514 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:05:35 crc kubenswrapper[4852]: E1201 20:05:35.319684 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:05:35 crc kubenswrapper[4852]: I1201 20:05:35.319514 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:05:35 crc kubenswrapper[4852]: E1201 20:05:35.319773 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:05:35 crc kubenswrapper[4852]: E1201 20:05:35.319923 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:05:36 crc kubenswrapper[4852]: I1201 20:05:36.319374 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 20:05:36 crc kubenswrapper[4852]: E1201 20:05:36.319581 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 20:05:37 crc kubenswrapper[4852]: I1201 20:05:37.319113 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:05:37 crc kubenswrapper[4852]: I1201 20:05:37.319200 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:05:37 crc kubenswrapper[4852]: I1201 20:05:37.319301 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:37 crc kubenswrapper[4852]: E1201 20:05:37.319464 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:05:37 crc kubenswrapper[4852]: E1201 20:05:37.319634 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:05:37 crc kubenswrapper[4852]: E1201 20:05:37.319935 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:05:38 crc kubenswrapper[4852]: I1201 20:05:38.319163 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 20:05:38 crc kubenswrapper[4852]: E1201 20:05:38.319341 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 20:05:39 crc kubenswrapper[4852]: I1201 20:05:39.319440 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:05:39 crc kubenswrapper[4852]: E1201 20:05:39.319623 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:05:39 crc kubenswrapper[4852]: I1201 20:05:39.319441 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:39 crc kubenswrapper[4852]: I1201 20:05:39.319440 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:05:39 crc kubenswrapper[4852]: E1201 20:05:39.319759 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:05:39 crc kubenswrapper[4852]: E1201 20:05:39.319852 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Has your network provider started?"} Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.225885 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.226200 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.226390 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.226417 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.226436 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:40Z","lastTransitionTime":"2025-12-01T20:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.319523 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:40 crc kubenswrapper[4852]: E1201 20:05:40.319757 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.329070 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.329117 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.329135 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.329160 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.329180 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:40Z","lastTransitionTime":"2025-12-01T20:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.342159 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.359798 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 
2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.375398 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.390794 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.406048 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.420944 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.430977 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.431017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.431030 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.431046 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.431076 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:40Z","lastTransitionTime":"2025-12-01T20:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.445516 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e4387
56948a1fb0226218c057da65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.461514 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.475310 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.488278 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.502605 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.514778 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.529527 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.533092 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.533122 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.533131 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.533145 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.533156 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:40Z","lastTransitionTime":"2025-12-01T20:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.543364 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.552264 4852 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.563924 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b780a86d-16be-45d9-b9b0-52b532f630c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67246c63582a7882b312291c0cf5ed6677e9cfa2008ae0a06a108b5b445a72dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b33bd0aefbe99db696de6ab52300a28393ffd0f4bab41c1b47c0f1b5239f2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26525cc0279c54f152ce1e8e2a10e5f6ba46cb9292d80a6228bad5e2f8c5e0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.577410 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:40Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.635607 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.635642 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.635651 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.635665 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.635673 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:40Z","lastTransitionTime":"2025-12-01T20:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.737857 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.737907 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.737919 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.737944 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.737956 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:40Z","lastTransitionTime":"2025-12-01T20:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.840205 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.840238 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.840247 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.840261 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.840270 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:40Z","lastTransitionTime":"2025-12-01T20:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.943083 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.943161 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.943177 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.943231 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:40 crc kubenswrapper[4852]: I1201 20:05:40.943246 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:40Z","lastTransitionTime":"2025-12-01T20:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.046025 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.046071 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.046085 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.046106 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.046122 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:41Z","lastTransitionTime":"2025-12-01T20:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.148691 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.148759 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.148777 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.148803 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.148821 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:41Z","lastTransitionTime":"2025-12-01T20:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.250784 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.250820 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.250857 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.250873 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.250884 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:41Z","lastTransitionTime":"2025-12-01T20:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.319346 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:41 crc kubenswrapper[4852]: E1201 20:05:41.319488 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.319360 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.319352 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:41 crc kubenswrapper[4852]: E1201 20:05:41.319659 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:41 crc kubenswrapper[4852]: E1201 20:05:41.319577 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.354517 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.354597 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.354619 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.354647 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.354664 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:41Z","lastTransitionTime":"2025-12-01T20:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.456845 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.456925 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.456950 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.456983 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.457005 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:41Z","lastTransitionTime":"2025-12-01T20:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.559758 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.559832 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.559853 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.559887 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.559908 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:41Z","lastTransitionTime":"2025-12-01T20:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.662740 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.662792 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.662804 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.662823 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.662835 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:41Z","lastTransitionTime":"2025-12-01T20:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.770193 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.770240 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.770259 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.770276 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.770289 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:41Z","lastTransitionTime":"2025-12-01T20:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.873904 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.873978 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.874002 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.874033 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.874058 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:41Z","lastTransitionTime":"2025-12-01T20:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.977125 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.977187 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.977207 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.977240 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:41 crc kubenswrapper[4852]: I1201 20:05:41.977262 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:41Z","lastTransitionTime":"2025-12-01T20:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.079767 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.079817 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.079833 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.079858 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.079874 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:42Z","lastTransitionTime":"2025-12-01T20:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.183294 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.183344 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.183357 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.183376 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.183388 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:42Z","lastTransitionTime":"2025-12-01T20:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.285761 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.285821 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.285838 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.285862 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.285883 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:42Z","lastTransitionTime":"2025-12-01T20:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.319207 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:42 crc kubenswrapper[4852]: E1201 20:05:42.319687 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.320860 4852 scope.go:117] "RemoveContainer" containerID="77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65" Dec 01 20:05:42 crc kubenswrapper[4852]: E1201 20:05:42.321244 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.388880 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.388944 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.388961 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.388986 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.389003 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:42Z","lastTransitionTime":"2025-12-01T20:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.492275 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.492337 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.492355 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.492378 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.492394 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:42Z","lastTransitionTime":"2025-12-01T20:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.595556 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.595730 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.595755 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.595988 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.596065 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:42Z","lastTransitionTime":"2025-12-01T20:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.699563 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.699622 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.699642 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.699667 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.699685 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:42Z","lastTransitionTime":"2025-12-01T20:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.802489 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.802536 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.802551 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.802570 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.802583 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:42Z","lastTransitionTime":"2025-12-01T20:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.905156 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.905251 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.905266 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.905300 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.905318 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:42Z","lastTransitionTime":"2025-12-01T20:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.980708 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.980756 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.980768 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.980791 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:42 crc kubenswrapper[4852]: I1201 20:05:42.980804 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:42Z","lastTransitionTime":"2025-12-01T20:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:42 crc kubenswrapper[4852]: E1201 20:05:42.997836 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:42Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.002400 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.002495 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.002516 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.002539 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.002555 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: E1201 20:05:43.021079 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:43Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.025945 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.026011 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.026026 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.026044 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.026077 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: E1201 20:05:43.045816 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:43Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.050895 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.050931 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.050945 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.050964 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.050977 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: E1201 20:05:43.067389 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:43Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.071874 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.071919 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.071932 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.071951 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.071964 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: E1201 20:05:43.091819 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:43Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:43 crc kubenswrapper[4852]: E1201 20:05:43.092048 4852 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.094221 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.094276 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.094295 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.094324 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.094343 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.196904 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.196935 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.196944 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.196957 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.196967 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.299591 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.299643 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.299655 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.299673 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.299686 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.318946 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.318999 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.318968 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:43 crc kubenswrapper[4852]: E1201 20:05:43.319100 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:43 crc kubenswrapper[4852]: E1201 20:05:43.319162 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:43 crc kubenswrapper[4852]: E1201 20:05:43.319234 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.405623 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.405684 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.405699 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.405717 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.405768 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.508961 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.509019 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.509030 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.509048 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.509059 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.611633 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.611682 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.611700 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.611723 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.611738 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.714328 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.714360 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.714369 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.714382 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.714392 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.817555 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.817612 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.817629 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.817651 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.817668 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.920910 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.920960 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.920968 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.920983 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:43 crc kubenswrapper[4852]: I1201 20:05:43.920995 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:43Z","lastTransitionTime":"2025-12-01T20:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.023802 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.023854 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.023864 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.023880 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.023889 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:44Z","lastTransitionTime":"2025-12-01T20:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.126203 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.126252 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.126265 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.126282 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.126295 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:44Z","lastTransitionTime":"2025-12-01T20:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.228719 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.228760 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.228771 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.228784 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.228793 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:44Z","lastTransitionTime":"2025-12-01T20:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.319807 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:44 crc kubenswrapper[4852]: E1201 20:05:44.319991 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.331218 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.331273 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.331290 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.331314 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.331331 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:44Z","lastTransitionTime":"2025-12-01T20:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.433398 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.433494 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.433514 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.433542 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.433559 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:44Z","lastTransitionTime":"2025-12-01T20:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.535622 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.535685 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.535704 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.535731 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.535749 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:44Z","lastTransitionTime":"2025-12-01T20:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.638296 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.638986 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.639083 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.639190 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.639289 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:44Z","lastTransitionTime":"2025-12-01T20:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.741485 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.741548 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.741565 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.741592 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.741607 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:44Z","lastTransitionTime":"2025-12-01T20:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.844624 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.844661 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.844670 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.844683 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.844693 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:44Z","lastTransitionTime":"2025-12-01T20:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.947912 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.948129 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.948250 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.948345 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:44 crc kubenswrapper[4852]: I1201 20:05:44.948431 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:44Z","lastTransitionTime":"2025-12-01T20:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.051977 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.052385 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.052578 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.052735 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.052876 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:45Z","lastTransitionTime":"2025-12-01T20:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.155049 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.155120 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.155141 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.155164 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.155182 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:45Z","lastTransitionTime":"2025-12-01T20:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.258026 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.258078 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.258090 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.258107 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.258120 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:45Z","lastTransitionTime":"2025-12-01T20:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.319213 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.319314 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.319214 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:45 crc kubenswrapper[4852]: E1201 20:05:45.319332 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:45 crc kubenswrapper[4852]: E1201 20:05:45.319523 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:45 crc kubenswrapper[4852]: E1201 20:05:45.319624 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.361043 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.361080 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.361091 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.361108 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.361121 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:45Z","lastTransitionTime":"2025-12-01T20:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.463712 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.463773 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.463786 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.463801 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.463810 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:45Z","lastTransitionTime":"2025-12-01T20:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.566283 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.566348 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.566361 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.566378 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.566390 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:45Z","lastTransitionTime":"2025-12-01T20:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.668968 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.669008 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.669019 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.669036 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.669047 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:45Z","lastTransitionTime":"2025-12-01T20:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.771108 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.771152 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.771165 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.771359 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.771372 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:45Z","lastTransitionTime":"2025-12-01T20:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.874160 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.874205 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.874218 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.874238 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.874254 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:45Z","lastTransitionTime":"2025-12-01T20:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.977791 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.977840 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.977850 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.977874 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:45 crc kubenswrapper[4852]: I1201 20:05:45.977884 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:45Z","lastTransitionTime":"2025-12-01T20:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.080180 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.080225 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.080237 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.080273 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.080285 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:46Z","lastTransitionTime":"2025-12-01T20:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.183228 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.183295 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.183319 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.183347 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.183369 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:46Z","lastTransitionTime":"2025-12-01T20:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.286330 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.286701 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.286711 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.286726 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.286736 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:46Z","lastTransitionTime":"2025-12-01T20:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.319772 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:46 crc kubenswrapper[4852]: E1201 20:05:46.319981 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.389406 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.389453 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.389483 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.389501 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.389514 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:46Z","lastTransitionTime":"2025-12-01T20:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.492232 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.492383 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.492444 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.492474 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.492597 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:46Z","lastTransitionTime":"2025-12-01T20:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.595292 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.595324 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.595333 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.595345 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.595354 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:46Z","lastTransitionTime":"2025-12-01T20:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.698028 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.698053 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.698062 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.698075 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.698083 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:46Z","lastTransitionTime":"2025-12-01T20:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.739953 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-cjd9b_6c477f33-3400-4c50-b2fc-e9306088770e/kube-multus/0.log" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.740028 4852 generic.go:334] "Generic (PLEG): container finished" podID="6c477f33-3400-4c50-b2fc-e9306088770e" containerID="67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293" exitCode=1 Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.740068 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-cjd9b" event={"ID":"6c477f33-3400-4c50-b2fc-e9306088770e","Type":"ContainerDied","Data":"67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.740622 4852 scope.go:117] "RemoveContainer" containerID="67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.754811 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/en
v\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.767966 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.777915 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.789264 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 
20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.799904 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.799936 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.799944 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.799958 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.799969 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:46Z","lastTransitionTime":"2025-12-01T20:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.805758 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.819174 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.831104 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.844964 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:46Z\\\",\\\"message\\\":\\\"2025-12-01T20:05:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7\\\\n2025-12-01T20:05:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7 to /host/opt/cni/bin/\\\\n2025-12-01T20:05:01Z [verbose] multus-daemon started\\\\n2025-12-01T20:05:01Z [verbose] Readiness Indicator file check\\\\n2025-12-01T20:05:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.861449 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.879735 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.892489 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.902521 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.902585 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.902597 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.902614 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.902628 4852 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:46Z","lastTransitionTime":"2025-12-01T20:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.903008 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":
\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.917914 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b780a86d-16be-45d9-b9b0-52b532f630c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67246c63582a7882b312291c0cf5ed6677e9cfa2008ae0a06a108b5b445a72dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b33bd0aefbe99db696de6ab52300a28393ffd0f4bab41c1b47c0f1b5239f2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26525cc0279c54f152ce1e8e2a10e5f6ba46cb9292d80a6228bad5e2f8c5e0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.931430 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.944566 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.961903 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:46 crc kubenswrapper[4852]: I1201 20:05:46.973556 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:46Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 
20:05:47.004589 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.004641 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.004655 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.004676 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.004693 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:47Z","lastTransitionTime":"2025-12-01T20:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.107133 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.107171 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.107186 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.107207 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.107224 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:47Z","lastTransitionTime":"2025-12-01T20:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.210286 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.210322 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.210331 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.210345 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.210355 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:47Z","lastTransitionTime":"2025-12-01T20:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.312105 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.312160 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.312172 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.312188 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.312199 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:47Z","lastTransitionTime":"2025-12-01T20:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.319703 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.319736 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.319837 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:05:47 crc kubenswrapper[4852]: E1201 20:05:47.319834 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:05:47 crc kubenswrapper[4852]: E1201 20:05:47.319989 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:05:47 crc kubenswrapper[4852]: E1201 20:05:47.320081 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.413992 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.414036 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.414047 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.414066 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.414082 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:47Z","lastTransitionTime":"2025-12-01T20:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.520363 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.520419 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.520439 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.520499 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.520524 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:47Z","lastTransitionTime":"2025-12-01T20:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.623627 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.623678 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.623690 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.623708 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.623719 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:47Z","lastTransitionTime":"2025-12-01T20:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.726088 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.726158 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.726182 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.726208 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.726230 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:47Z","lastTransitionTime":"2025-12-01T20:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.744370 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-cjd9b_6c477f33-3400-4c50-b2fc-e9306088770e/kube-multus/0.log" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.744432 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-cjd9b" event={"ID":"6c477f33-3400-4c50-b2fc-e9306088770e","Type":"ContainerStarted","Data":"664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f"} Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.761218 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.773922 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.788086 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.798714 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 
20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.808598 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.819571 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc 
kubenswrapper[4852]: I1201 20:05:47.828765 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.828802 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.828813 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.828829 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.828841 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:47Z","lastTransitionTime":"2025-12-01T20:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.829856 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.842146 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:46Z\\\",\\\"message\\\":\\\"2025-12-01T20:05:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7\\\\n2025-12-01T20:05:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7 to /host/opt/cni/bin/\\\\n2025-12-01T20:05:01Z [verbose] multus-daemon started\\\\n2025-12-01T20:05:01Z [verbose] Readiness Indicator file check\\\\n2025-12-01T20:05:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.857972 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.869282 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.881788 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.910973 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.930735 4852 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.930776 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.930785 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.930798 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.930807 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:47Z","lastTransitionTime":"2025-12-01T20:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.933472 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b780a86d-16be-45d9-b9b0-52b532f630c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67246c63582a7882b312291c0cf5ed6677e9cfa2008ae0a06a108b5b445a72dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b33bd0aefbe99db696de6ab52300a28393ffd0f4bab41c1b47c0f1b5239f2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26525cc0279c54f152ce1e8e2a10e5f6ba46cb9292d80a6228bad5e2f8c5e0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.946209 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.957835 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.971927 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:47 crc kubenswrapper[4852]: I1201 20:05:47.982233 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:47Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 
20:05:48.032755 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.032804 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.032813 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.032826 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.032837 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:48Z","lastTransitionTime":"2025-12-01T20:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.135642 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.135923 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.136026 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.136136 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.136226 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:48Z","lastTransitionTime":"2025-12-01T20:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.238513 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.238573 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.238586 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.238603 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.238614 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:48Z","lastTransitionTime":"2025-12-01T20:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
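The setters.go:603 entries above serialize a full node Ready condition on every status sync: lastHeartbeatTime advances each time, while lastTransitionTime only moves when the status value itself flips. A minimal sketch of that shape and update rule, in plain Go with a local stand-in type (not the kubelet's own code or the upstream k8s.io/api type):

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// NodeCondition mirrors the fields visible in the log entries above
// (a hypothetical local stand-in for illustration only).
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

// updateCondition advances the heartbeat on every call but only moves
// the transition time when the status value actually changes.
func updateCondition(prev *NodeCondition, status, reason, msg string, now time.Time) NodeCondition {
	ts := now.UTC().Format(time.RFC3339)
	next := NodeCondition{Type: "Ready", Status: status, LastHeartbeatTime: ts, Reason: reason, Message: msg}
	if prev != nil && prev.Status == status {
		next.LastTransitionTime = prev.LastTransitionTime // same status: keep old transition time
	} else {
		next.LastTransitionTime = ts // status flipped: transition happens now
	}
	return next
}

func main() {
	c := updateCondition(nil, "False", "KubeletNotReady",
		"container runtime network not ready: NetworkReady=false", time.Now())
	b, _ := json.Marshal(c)
	fmt.Println(string(b)) // same shape as the condition={...} payloads above
}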
Has your network provider started?"} Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.319585 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:48 crc kubenswrapper[4852]: E1201 20:05:48.319984 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.340409 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.340704 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.340824 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.340913 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.340978 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:48Z","lastTransitionTime":"2025-12-01T20:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.443024 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.443084 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.443095 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.443113 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.443129 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:48Z","lastTransitionTime":"2025-12-01T20:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.544983 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.545015 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.545025 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.545038 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.545047 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:48Z","lastTransitionTime":"2025-12-01T20:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.647809 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.647874 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.647887 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.647904 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.647944 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:48Z","lastTransitionTime":"2025-12-01T20:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.749890 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.749923 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.749934 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.749948 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.749958 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:48Z","lastTransitionTime":"2025-12-01T20:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.852169 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.852206 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.852222 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.852237 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.852249 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:48Z","lastTransitionTime":"2025-12-01T20:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.954691 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.954743 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.954758 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.954774 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:48 crc kubenswrapper[4852]: I1201 20:05:48.954784 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:48Z","lastTransitionTime":"2025-12-01T20:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.057264 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.057325 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.057337 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.057352 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.057363 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:49Z","lastTransitionTime":"2025-12-01T20:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.159525 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.159573 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.159586 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.159605 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.159619 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:49Z","lastTransitionTime":"2025-12-01T20:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.261711 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.261757 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.261772 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.261791 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.261804 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:49Z","lastTransitionTime":"2025-12-01T20:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.319866 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.319921 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.320060 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:49 crc kubenswrapper[4852]: E1201 20:05:49.320047 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:49 crc kubenswrapper[4852]: E1201 20:05:49.320166 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:49 crc kubenswrapper[4852]: E1201 20:05:49.320384 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.364001 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.364043 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.364053 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.364068 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.364079 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:49Z","lastTransitionTime":"2025-12-01T20:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.465941 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.465999 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.466013 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.466032 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.466044 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:49Z","lastTransitionTime":"2025-12-01T20:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.568928 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.568966 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.568997 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.569013 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.569024 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:49Z","lastTransitionTime":"2025-12-01T20:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.671623 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.671679 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.671694 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.671715 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.671730 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:49Z","lastTransitionTime":"2025-12-01T20:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.773848 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.773892 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.773903 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.773918 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.773928 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:49Z","lastTransitionTime":"2025-12-01T20:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.875566 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.875610 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.875623 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.875639 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.875650 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:49Z","lastTransitionTime":"2025-12-01T20:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.977955 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.978005 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.978020 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.978038 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:49 crc kubenswrapper[4852]: I1201 20:05:49.978048 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:49Z","lastTransitionTime":"2025-12-01T20:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.079698 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.079737 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.079748 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.079763 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.079774 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:50Z","lastTransitionTime":"2025-12-01T20:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.182329 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.182356 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.182364 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.182376 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.182384 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:50Z","lastTransitionTime":"2025-12-01T20:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.284765 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.284808 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.284821 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.284838 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.284850 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:50Z","lastTransitionTime":"2025-12-01T20:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.319665 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:50 crc kubenswrapper[4852]: E1201 20:05:50.319801 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.332056 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.343040 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.351557 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.360789 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b780a86d-16be-45d9-b9b0-52b532f630c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67246c63582a7882b312291c0cf5ed6677e9cfa2008ae0a06a108b5b445a72dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b33bd0aefbe99db696de6ab52300a28393ffd0f4bab41c1b47c0f1b5239f2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26525cc0279c54f152ce1e8e2a10e5f6ba46cb9292d80a6228bad5e2f8c5e0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.371139 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.383432 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.386471 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.386506 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.386515 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.386530 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.386541 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:50Z","lastTransitionTime":"2025-12-01T20:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.398153 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.408210 4852 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.420033 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.443183 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.452610 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.462437 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 
20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.473093 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.489093 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.489129 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.489138 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.489154 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.489164 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:50Z","lastTransitionTime":"2025-12-01T20:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.494485 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.507885 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.520487 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:46Z\\\",\\\"message\\\":\\\"2025-12-01T20:05:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7\\\\n2025-12-01T20:05:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7 to /host/opt/cni/bin/\\\\n2025-12-01T20:05:01Z [verbose] multus-daemon started\\\\n2025-12-01T20:05:01Z [verbose] Readiness Indicator file check\\\\n2025-12-01T20:05:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.536242 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:50Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.591727 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.591770 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.591782 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.591795 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.591806 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:50Z","lastTransitionTime":"2025-12-01T20:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.693966 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.694000 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.694009 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.694022 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.694031 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:50Z","lastTransitionTime":"2025-12-01T20:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.796333 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.796397 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.796409 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.796430 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.796442 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:50Z","lastTransitionTime":"2025-12-01T20:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.898482 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.898520 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.898529 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.898542 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:50 crc kubenswrapper[4852]: I1201 20:05:50.898551 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:50Z","lastTransitionTime":"2025-12-01T20:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.000122 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.000164 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.000175 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.000194 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.000205 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:51Z","lastTransitionTime":"2025-12-01T20:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.102206 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.102243 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.102252 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.102265 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.102273 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:51Z","lastTransitionTime":"2025-12-01T20:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.203731 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.203759 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.203767 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.203778 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.203787 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:51Z","lastTransitionTime":"2025-12-01T20:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.305874 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.305958 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.305985 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.306018 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.306053 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:51Z","lastTransitionTime":"2025-12-01T20:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.319981 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.320534 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.320586 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:51 crc kubenswrapper[4852]: E1201 20:05:51.320740 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:51 crc kubenswrapper[4852]: E1201 20:05:51.320774 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:51 crc kubenswrapper[4852]: E1201 20:05:51.320878 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.330997 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.408212 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.408477 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.408587 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.408668 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.408736 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:51Z","lastTransitionTime":"2025-12-01T20:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.510948 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.511042 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.511065 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.511092 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.511113 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:51Z","lastTransitionTime":"2025-12-01T20:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.613531 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.613856 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.614037 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.614231 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.614398 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:51Z","lastTransitionTime":"2025-12-01T20:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.718138 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.718174 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.718182 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.718196 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.718208 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:51Z","lastTransitionTime":"2025-12-01T20:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.821834 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.821890 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.821907 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.821929 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.821946 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:51Z","lastTransitionTime":"2025-12-01T20:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.924376 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.924415 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.924427 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.924442 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:51 crc kubenswrapper[4852]: I1201 20:05:51.924470 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:51Z","lastTransitionTime":"2025-12-01T20:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.026588 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.026629 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.026641 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.026657 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.026669 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:52Z","lastTransitionTime":"2025-12-01T20:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.128549 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.128578 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.128587 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.128602 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.128613 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:52Z","lastTransitionTime":"2025-12-01T20:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.231729 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.231854 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.231869 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.231964 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.232056 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:52Z","lastTransitionTime":"2025-12-01T20:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.319483 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:52 crc kubenswrapper[4852]: E1201 20:05:52.319635 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.334619 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.334672 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.334689 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.334714 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.334730 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:52Z","lastTransitionTime":"2025-12-01T20:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.436736 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.436777 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.436786 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.436799 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.436808 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:52Z","lastTransitionTime":"2025-12-01T20:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.538736 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.538773 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.538782 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.538795 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.538805 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:52Z","lastTransitionTime":"2025-12-01T20:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.641875 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.641923 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.641935 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.641954 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.641965 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:52Z","lastTransitionTime":"2025-12-01T20:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.744922 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.744973 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.744986 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.745003 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.745019 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:52Z","lastTransitionTime":"2025-12-01T20:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.847804 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.847853 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.847866 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.847884 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.847898 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:52Z","lastTransitionTime":"2025-12-01T20:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.949863 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.949912 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.949927 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.949942 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:52 crc kubenswrapper[4852]: I1201 20:05:52.949952 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:52Z","lastTransitionTime":"2025-12-01T20:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.052605 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.052659 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.052673 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.052691 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.052700 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.114289 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.114340 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.114356 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.114376 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.114392 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: E1201 20:05:53.128888 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:53Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.133030 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.133096 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.133109 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.133131 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.133145 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: E1201 20:05:53.147028 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:53Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.151243 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.151285 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.151298 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.151319 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.151333 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: E1201 20:05:53.165438 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:53Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.169254 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.169295 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.169309 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.169325 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.169339 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: E1201 20:05:53.183416 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:53Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.186974 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.187017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.187032 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.187054 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.187073 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: E1201 20:05:53.199854 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:53Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:53 crc kubenswrapper[4852]: E1201 20:05:53.200245 4852 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.201773 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.201804 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.201819 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.201838 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.201851 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.304207 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.304271 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.304288 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.304310 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.304329 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.319718 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.319758 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:53 crc kubenswrapper[4852]: E1201 20:05:53.319841 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.319728 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:53 crc kubenswrapper[4852]: E1201 20:05:53.319934 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:53 crc kubenswrapper[4852]: E1201 20:05:53.319997 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.407299 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.407348 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.407365 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.407382 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.407393 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.509628 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.509669 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.509681 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.509697 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.509708 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.611810 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.611850 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.611860 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.611877 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.611888 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.714327 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.714407 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.714432 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.714487 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.714505 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.816886 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.816943 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.816953 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.816985 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.816995 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.920219 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.920265 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.920275 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.920291 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:53 crc kubenswrapper[4852]: I1201 20:05:53.920303 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:53Z","lastTransitionTime":"2025-12-01T20:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.023442 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.023495 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.023506 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.023524 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.023536 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:54Z","lastTransitionTime":"2025-12-01T20:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.126283 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.126334 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.126348 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.126368 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.126379 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:54Z","lastTransitionTime":"2025-12-01T20:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.231155 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.231203 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.231214 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.231234 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.231247 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:54Z","lastTransitionTime":"2025-12-01T20:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.319400 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:54 crc kubenswrapper[4852]: E1201 20:05:54.319633 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.333754 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.333788 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.333798 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.333813 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.333826 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:54Z","lastTransitionTime":"2025-12-01T20:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
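The setters.go:603 entries above carry the node's Ready condition as inline JSON. A minimal Go sketch that marshals an equivalent structure follows; the struct is an illustrative mirror of the keys seen in the log (the real type is corev1.NodeCondition in k8s.io/api, which uses metav1.Time), not the kubelet's own code.

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Illustrative mirror of the condition payload in the setters.go:603
// entries above; field names match the JSON keys seen in the log.
type NodeCondition struct {
	Type               string    `json:"type"`
	Status             string    `json:"status"`
	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
	LastTransitionTime time.Time `json:"lastTransitionTime"`
	Reason             string    `json:"reason"`
	Message            string    `json:"message"`
}

func main() {
	now := time.Date(2025, 12, 1, 20, 5, 54, 0, time.UTC)
	c := NodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
	b, _ := json.Marshal(c)
	fmt.Println(string(b)) // prints JSON in the same shape as the log's condition={...}
}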
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.435616 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.435664 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.435674 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.435689 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.435726 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:54Z","lastTransitionTime":"2025-12-01T20:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.537959 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.538004 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.538016 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.538032 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.538043 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:54Z","lastTransitionTime":"2025-12-01T20:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.639916 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.639954 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.639965 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.639977 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.639985 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:54Z","lastTransitionTime":"2025-12-01T20:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.742494 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.742530 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.742539 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.742552 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.742562 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:54Z","lastTransitionTime":"2025-12-01T20:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.844919 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.844974 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.844987 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.845004 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.845020 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:54Z","lastTransitionTime":"2025-12-01T20:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.947439 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.947504 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.947518 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.947537 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:54 crc kubenswrapper[4852]: I1201 20:05:54.947551 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:54Z","lastTransitionTime":"2025-12-01T20:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.049862 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.049904 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.049912 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.049927 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.049940 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:55Z","lastTransitionTime":"2025-12-01T20:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.151790 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.151835 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.151847 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.151867 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.151879 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:55Z","lastTransitionTime":"2025-12-01T20:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.254545 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.254588 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.254599 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.254616 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.254629 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:55Z","lastTransitionTime":"2025-12-01T20:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.319825 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.319880 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.319941 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:05:55 crc kubenswrapper[4852]: E1201 20:05:55.320051 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:05:55 crc kubenswrapper[4852]: E1201 20:05:55.320130 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:05:55 crc kubenswrapper[4852]: E1201 20:05:55.320250 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.357108 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.357160 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.357176 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.357198 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.357216 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:55Z","lastTransitionTime":"2025-12-01T20:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.459930 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.459982 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.459995 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.460016 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.460026 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:55Z","lastTransitionTime":"2025-12-01T20:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.561970 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.562021 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.562033 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.562053 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.562066 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:55Z","lastTransitionTime":"2025-12-01T20:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.664092 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.664755 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.664768 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.664783 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.664793 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:55Z","lastTransitionTime":"2025-12-01T20:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.766260 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.766289 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.766301 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.766320 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.766334 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:55Z","lastTransitionTime":"2025-12-01T20:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.868488 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.868558 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.868570 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.868593 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.868606 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:55Z","lastTransitionTime":"2025-12-01T20:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.970012 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.970060 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.970070 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.970084 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:55 crc kubenswrapper[4852]: I1201 20:05:55.970092 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:55Z","lastTransitionTime":"2025-12-01T20:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.072241 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.072278 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.072293 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.072312 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.072323 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:56Z","lastTransitionTime":"2025-12-01T20:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.174583 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.174625 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.174637 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.174656 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.174667 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:56Z","lastTransitionTime":"2025-12-01T20:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.276867 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.276902 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.276911 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.276924 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.276934 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:56Z","lastTransitionTime":"2025-12-01T20:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.319630 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 20:05:56 crc kubenswrapper[4852]: E1201 20:05:56.319773 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.321400 4852 scope.go:117] "RemoveContainer" containerID="77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.379238 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.379633 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.379655 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.379672 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.379684 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:56Z","lastTransitionTime":"2025-12-01T20:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.482843 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.482872 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.482880 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.482892 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.482901 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:56Z","lastTransitionTime":"2025-12-01T20:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
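Every NetworkPluginNotReady entry above reduces to one fact: the CNI conf directory /etc/kubernetes/cni/net.d/ contains no network configuration yet. A minimal Go sketch of that readiness probe follows; it is an assumption-level illustration, not CRI-O's or the kubelet's actual code, and the .conf/.conflist/.json extensions follow the usual libcni convention rather than anything stated in this log.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent reports whether the directory holds any file that
// looks like a CNI network configuration.
func cniConfigPresent(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		// Mirrors the condition the kubelet keeps reporting above.
		fmt.Println("NetworkReady=false reason:NetworkPluginNotReady")
		return
	}
	fmt.Println("NetworkReady=true")
}

Once the network operator (here, ovnkube) writes its config into that directory, the check flips and the node's Ready condition can transition back to True.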
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.585739 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.585797 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.585814 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.585837 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.585850 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:56Z","lastTransitionTime":"2025-12-01T20:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.688058 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.688107 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.688122 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.688141 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.688152 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:56Z","lastTransitionTime":"2025-12-01T20:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.790513 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.790560 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.790571 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.790591 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.790605 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:56Z","lastTransitionTime":"2025-12-01T20:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.892548 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.892587 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.892598 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.892613 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.892624 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:56Z","lastTransitionTime":"2025-12-01T20:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.995007 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.995052 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.995064 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.995081 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:56 crc kubenswrapper[4852]: I1201 20:05:56.995092 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:56Z","lastTransitionTime":"2025-12-01T20:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.097429 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.097494 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.097508 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.097526 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.097538 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:57Z","lastTransitionTime":"2025-12-01T20:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.199572 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.199609 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.199620 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.199634 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.199643 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:57Z","lastTransitionTime":"2025-12-01T20:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.302240 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.302272 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.302282 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.302295 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.302304 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:57Z","lastTransitionTime":"2025-12-01T20:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.318936 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.319040 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:05:57 crc kubenswrapper[4852]: E1201 20:05:57.319066 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.318936 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:05:57 crc kubenswrapper[4852]: E1201 20:05:57.319195 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:05:57 crc kubenswrapper[4852]: E1201 20:05:57.319256 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.403960 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.404004 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.404017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.404035 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.404046 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:57Z","lastTransitionTime":"2025-12-01T20:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.506627 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.506661 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.506672 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.506685 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.506694 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:57Z","lastTransitionTime":"2025-12-01T20:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.608719 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.608769 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.608782 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.608799 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.608811 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:57Z","lastTransitionTime":"2025-12-01T20:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.711416 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.711464 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.711475 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.711491 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.711502 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:57Z","lastTransitionTime":"2025-12-01T20:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.772780 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/3.log"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.773781 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/2.log"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.777194 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14" exitCode=1
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.777233 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"}
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.777268 4852 scope.go:117] "RemoveContainer" containerID="77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65"
Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.778433 4852 scope.go:117] "RemoveContainer" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"
Dec 01 20:05:57 crc kubenswrapper[4852]: E1201 20:05:57.778764 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00"
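The "back-off 40s restarting failed container" entry above is the kubelet's CrashLoopBackOff: the restart delay doubles after each crash, starting from roughly 10s and capped at 5m. Those two constants are the commonly documented kubelet defaults, assumed here rather than read from this cluster's configuration; the sketch below only illustrates the schedule, it is not kubelet code.

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay returns the assumed back-off delay after the given
// number of prior restarts: 10s doubled per crash, capped at 5m.
func crashLoopDelay(restarts int) time.Duration {
	const (
		initial = 10 * time.Second
		cap     = 5 * time.Minute
	)
	d := initial
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= cap {
			return cap
		}
	}
	return d
}

func main() {
	for r := 0; r <= 5; r++ {
		fmt.Printf("restart %d -> back-off %s\n", r, crashLoopDelay(r))
	}
	// Under these assumptions, restart 2 -> back-off 40s, matching the
	// ovnkube-controller entry above (its third crash in a row).
}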
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.810915 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.813633 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.813668 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.813680 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.813701 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.813717 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:57Z","lastTransitionTime":"2025-12-01T20:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.824263 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b780a86d-16be-45d9-b9b0-52b532f630c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67246c63582a7882b312291c0cf5ed6677e9cfa2008ae0a06a108b5b445a72dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b33bd0aefbe99db696de6ab52300a28393ffd0f4bab41c1b47c0f1b5239f2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26525cc0279c54f152ce1e8e2a10e5f6ba46cb9292d80a6228bad5e2f8c5e0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.838490 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4677ab15-06de-4dbe-9ba8-e5b34645c84d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ff2e5b90958523977e4150eafee9e93676c4cadf56821d67af12044d1531e6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://675f8ddf05b0c46a62452e39f19fa0066d40314c0441382b97b835d73bcf712e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://675f8ddf05b0c46a62452e39f19fa0066d40314c0441382b97b835d73bcf712e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.851577 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.867432 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.882979 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.896685 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.907804 4852 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.915762 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.915803 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.915815 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.915831 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.915844 4852 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:57Z","lastTransitionTime":"2025-12-01T20:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.922697 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.937569 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:46Z\\\",\\\"message\\\":\\\"2025-12-01T20:05:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7\\\\n2025-12-01T20:05:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7 to /host/opt/cni/bin/\\\\n2025-12-01T20:05:01Z [verbose] multus-daemon started\\\\n2025-12-01T20:05:01Z [verbose] Readiness Indicator file check\\\\n2025-12-01T20:05:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.964291 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:57Z\\\",\\\"message\\\":\\\"r.go:444] Built service openshift-console/console LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461825 6816 services_controller.go:445] Built service openshift-console/console LB template configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461825 6816 lb_config.go:1031] Cluster endpoints for openshift-console/downloads for network=default are: map[]\\\\nI1201 20:05:57.461834 6816 services_controller.go:443] Built service openshift-console/downloads LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.213\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:80, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, 
nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1201 20:05:57.461844 6816 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461852 6816 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nF1201 20:05:57.461852 6816 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\
\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.979261 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:57 crc kubenswrapper[4852]: I1201 20:05:57.994164 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:57Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.006701 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:58Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.018242 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.018279 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.018291 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.018307 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.018318 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:58Z","lastTransitionTime":"2025-12-01T20:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.021705 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:58Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.036079 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:58Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.049145 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:05:58Z is after 2025-08-24T17:21:41Z" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.122004 4852 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.122103 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.122123 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.122157 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.122181 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:58Z","lastTransitionTime":"2025-12-01T20:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.225408 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.225476 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.225486 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.225505 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.225517 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:58Z","lastTransitionTime":"2025-12-01T20:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.319312 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:05:58 crc kubenswrapper[4852]: E1201 20:05:58.319591 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.328839 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.328893 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.328909 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.328933 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.328947 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:58Z","lastTransitionTime":"2025-12-01T20:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.431284 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.431346 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.431366 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.431392 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.431409 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:58Z","lastTransitionTime":"2025-12-01T20:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.534381 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.535048 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.535315 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.535553 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.535743 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:58Z","lastTransitionTime":"2025-12-01T20:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.639570 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.640036 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.640198 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.640501 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.640648 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:58Z","lastTransitionTime":"2025-12-01T20:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.744410 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.744710 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.744762 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.744801 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.744826 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:58Z","lastTransitionTime":"2025-12-01T20:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.782558 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/3.log" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.847537 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.847601 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.847622 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.847692 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.847712 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:58Z","lastTransitionTime":"2025-12-01T20:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.950544 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.950605 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.950622 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.950649 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:58 crc kubenswrapper[4852]: I1201 20:05:58.950668 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:58Z","lastTransitionTime":"2025-12-01T20:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.053440 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.053511 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.053520 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.053534 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.053544 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:59Z","lastTransitionTime":"2025-12-01T20:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.155904 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.155964 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.155978 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.156003 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.156016 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:59Z","lastTransitionTime":"2025-12-01T20:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.258952 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.259019 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.259035 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.259059 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.259079 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:59Z","lastTransitionTime":"2025-12-01T20:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.319377 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.319523 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:05:59 crc kubenswrapper[4852]: E1201 20:05:59.319602 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.319382 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:05:59 crc kubenswrapper[4852]: E1201 20:05:59.319769 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:05:59 crc kubenswrapper[4852]: E1201 20:05:59.319923 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.362011 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.362092 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.362118 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.362149 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.362186 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:59Z","lastTransitionTime":"2025-12-01T20:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.464858 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.464897 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.464907 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.464925 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.464938 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:59Z","lastTransitionTime":"2025-12-01T20:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.568087 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.568139 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.568151 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.568169 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.568183 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:59Z","lastTransitionTime":"2025-12-01T20:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.670980 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.671062 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.671085 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.671115 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.671139 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:59Z","lastTransitionTime":"2025-12-01T20:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.773530 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.773575 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.773586 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.773602 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.773613 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:59Z","lastTransitionTime":"2025-12-01T20:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.875889 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.875937 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.875950 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.875966 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.875978 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:59Z","lastTransitionTime":"2025-12-01T20:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.978137 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.978183 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.978192 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.978205 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:05:59 crc kubenswrapper[4852]: I1201 20:05:59.978215 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:05:59Z","lastTransitionTime":"2025-12-01T20:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.080170 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.080224 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.080238 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.080258 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.080271 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:00Z","lastTransitionTime":"2025-12-01T20:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.182777 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.182805 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.182813 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.182828 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.182838 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:00Z","lastTransitionTime":"2025-12-01T20:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.284894 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.284974 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.284996 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.285025 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.285047 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:00Z","lastTransitionTime":"2025-12-01T20:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.319735 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:00 crc kubenswrapper[4852]: E1201 20:06:00.319979 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.340299 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b780a86d-16be-45d9-b9b0-52b532f630c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67246c63582a7882b312291c0cf5ed6677e9cfa2008ae0a06a108b5b445a72dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b33bd0aefbe99db696de6ab52300a28393ffd0f4bab41c1b47c0f1b5239f2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26525cc0279c54f152ce1e8e2a10e5f6ba46cb9292d80a6228bad5e2f8c5e0f1\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.356616 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4677ab15-06de-4dbe-9ba8-e5b34645c84d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ff2e5b90958523977e4150eafee9e93676c4cadf56821d67af12044d1531e6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://675f8ddf05b0c46a62452e39f19fa0066d40314c0441382b97b835d73bcf712e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://675f8ddf05b0c46a62452e39f19fa0066d40314c0441382b97b835d73bcf712e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.371163 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.387275 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.387320 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.387330 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.387350 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.387360 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:00Z","lastTransitionTime":"2025-12-01T20:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.390998 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.404203 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.415301 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.436291 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.454425 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.469254 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.481528 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 
20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.489735 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.489787 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.489801 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.489817 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.489828 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:00Z","lastTransitionTime":"2025-12-01T20:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.493527 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.510395 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.523735 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.538748 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:46Z\\\",\\\"message\\\":\\\"2025-12-01T20:05:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7\\\\n2025-12-01T20:05:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7 to /host/opt/cni/bin/\\\\n2025-12-01T20:05:01Z [verbose] multus-daemon started\\\\n2025-12-01T20:05:01Z [verbose] Readiness Indicator file check\\\\n2025-12-01T20:05:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.557491 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:57Z\\\",\\\"message\\\":\\\"r.go:444] Built service openshift-console/console LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461825 6816 services_controller.go:445] Built service openshift-console/console LB template configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461825 6816 lb_config.go:1031] Cluster endpoints for openshift-console/downloads for network=default are: map[]\\\\nI1201 20:05:57.461834 6816 services_controller.go:443] Built service openshift-console/downloads LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.213\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:80, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, 
nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1201 20:05:57.461844 6816 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461852 6816 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nF1201 20:05:57.461852 6816 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\
\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.579230 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.591592 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.591662 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.591679 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.591696 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.591730 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:00Z","lastTransitionTime":"2025-12-01T20:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.598800 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.613987 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:00Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.694279 4852 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.694350 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.694363 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.694379 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.694411 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:00Z","lastTransitionTime":"2025-12-01T20:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.796312 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.796346 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.796357 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.796374 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.796386 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:00Z","lastTransitionTime":"2025-12-01T20:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.899109 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.899154 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.899163 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.899179 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:00 crc kubenswrapper[4852]: I1201 20:06:00.899190 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:00Z","lastTransitionTime":"2025-12-01T20:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.003200 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.003263 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.003277 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.003293 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.003326 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:01Z","lastTransitionTime":"2025-12-01T20:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.105528 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.105566 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.105579 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.105593 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.105601 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:01Z","lastTransitionTime":"2025-12-01T20:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.207803 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.207840 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.207851 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.207865 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.207875 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:01Z","lastTransitionTime":"2025-12-01T20:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.309856 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.309901 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.309913 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.309929 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.309941 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:01Z","lastTransitionTime":"2025-12-01T20:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.319278 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.319294 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:01 crc kubenswrapper[4852]: E1201 20:06:01.319374 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:01 crc kubenswrapper[4852]: E1201 20:06:01.319440 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.319438 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:01 crc kubenswrapper[4852]: E1201 20:06:01.319669 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.413174 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.413264 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.413282 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.413303 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.413321 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:01Z","lastTransitionTime":"2025-12-01T20:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.515292 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.515345 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.515361 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.515385 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.515402 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:01Z","lastTransitionTime":"2025-12-01T20:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.618032 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.618061 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.618069 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.618081 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.618090 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:01Z","lastTransitionTime":"2025-12-01T20:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.720408 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.720527 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.720561 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.720594 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.720620 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:01Z","lastTransitionTime":"2025-12-01T20:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.823131 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.823172 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.823185 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.823198 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.823208 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:01Z","lastTransitionTime":"2025-12-01T20:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.925672 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.925727 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.925744 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.925768 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:01 crc kubenswrapper[4852]: I1201 20:06:01.925785 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:01Z","lastTransitionTime":"2025-12-01T20:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.028817 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.028888 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.028907 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.028933 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.028952 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:02Z","lastTransitionTime":"2025-12-01T20:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.131882 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.131961 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.131984 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.132014 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.132036 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:02Z","lastTransitionTime":"2025-12-01T20:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.235910 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.235964 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.235983 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.236007 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.236026 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:02Z","lastTransitionTime":"2025-12-01T20:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.320024 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:02 crc kubenswrapper[4852]: E1201 20:06:02.320534 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.338537 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.338598 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.338616 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.338641 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.338661 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:02Z","lastTransitionTime":"2025-12-01T20:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.441973 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.442029 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.442046 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.442072 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.442089 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:02Z","lastTransitionTime":"2025-12-01T20:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.545016 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.545087 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.545097 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.545112 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.545121 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:02Z","lastTransitionTime":"2025-12-01T20:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.647858 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.647907 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.647924 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.647948 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.647965 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:02Z","lastTransitionTime":"2025-12-01T20:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.750713 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.750782 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.750808 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.750836 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.750857 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:02Z","lastTransitionTime":"2025-12-01T20:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.852924 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.852974 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.852992 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.853017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.853034 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:02Z","lastTransitionTime":"2025-12-01T20:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.955531 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.955790 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.955872 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.955935 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:02 crc kubenswrapper[4852]: I1201 20:06:02.955992 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:02Z","lastTransitionTime":"2025-12-01T20:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.058573 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.058668 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.058691 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.058717 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.058735 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.161544 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.161840 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.161849 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.161862 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.161871 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.225203 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.225277 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.225573 4852 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.225653 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:07:07.225630155 +0000 UTC m=+147.152711602 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.225837 4852 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.225921 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 20:07:07.225899704 +0000 UTC m=+147.152981161 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.265297 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.265374 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.265397 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.265429 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.265494 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.319306 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.319376 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.319521 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.319550 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.319753 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.319833 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.326590 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.326758 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.326849 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.326882 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:07.326847573 +0000 UTC m=+147.253929030 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.327023 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.327027 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.327050 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.327065 4852 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.327073 4852 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:06:03 crc 
kubenswrapper[4852]: E1201 20:06:03.327085 4852 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.327144 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 20:07:07.327122291 +0000 UTC m=+147.254203758 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.327172 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 20:07:07.327160003 +0000 UTC m=+147.254241470 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.368135 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.368194 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.368217 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.368244 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.368265 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.471545 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.471604 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.471616 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.471638 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.471649 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.574842 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.574895 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.574906 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.574924 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.574937 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.581631 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.581695 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.581709 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.581734 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.581746 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.594958 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.599360 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.599389 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.599397 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.599409 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.599420 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.613533 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.617522 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.617568 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.617580 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.617597 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.617610 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.631362 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.631591 4852 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.631752 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs podName:7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a nodeName:}" failed. No retries permitted until 2025-12-01 20:07:07.631719311 +0000 UTC m=+147.558800728 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs") pod "network-metrics-daemon-j2q4c" (UID: "7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.637260 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.637303 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.637314 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.637331 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.637341 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.651560 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.655028 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.655053 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.655062 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.655077 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.655089 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.670692 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:03Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:03 crc kubenswrapper[4852]: E1201 20:06:03.671090 4852 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.678184 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.678231 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.678242 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.678258 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.678270 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.781570 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.781630 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.781649 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.781676 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.781692 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.884651 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.884692 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.884701 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.884713 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.884723 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.987751 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.987827 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.987846 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.987875 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:03 crc kubenswrapper[4852]: I1201 20:06:03.987896 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:03Z","lastTransitionTime":"2025-12-01T20:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.091559 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.091647 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.091663 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.091684 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.091700 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:04Z","lastTransitionTime":"2025-12-01T20:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.194924 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.195006 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.195031 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.195065 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.195090 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:04Z","lastTransitionTime":"2025-12-01T20:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.298883 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.298941 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.298954 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.298977 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.298994 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:04Z","lastTransitionTime":"2025-12-01T20:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.319898 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:04 crc kubenswrapper[4852]: E1201 20:06:04.320122 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.402481 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.402531 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.402543 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.402569 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.402582 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:04Z","lastTransitionTime":"2025-12-01T20:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.505763 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.505843 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.505859 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.505903 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.505918 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:04Z","lastTransitionTime":"2025-12-01T20:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.609267 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.609347 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.609375 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.609420 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.609447 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:04Z","lastTransitionTime":"2025-12-01T20:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.713398 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.713508 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.713534 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.713565 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.713584 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:04Z","lastTransitionTime":"2025-12-01T20:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.815682 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.815742 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.815759 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.815786 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.815802 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:04Z","lastTransitionTime":"2025-12-01T20:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.919823 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.919912 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.919937 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.919989 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:04 crc kubenswrapper[4852]: I1201 20:06:04.920016 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:04Z","lastTransitionTime":"2025-12-01T20:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.023675 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.023732 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.023745 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.023764 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.023779 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:05Z","lastTransitionTime":"2025-12-01T20:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.127010 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.127057 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.127075 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.127096 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.127109 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:05Z","lastTransitionTime":"2025-12-01T20:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.230178 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.230231 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.230245 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.230263 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.230275 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:05Z","lastTransitionTime":"2025-12-01T20:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.319865 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.319914 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.319921 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:05 crc kubenswrapper[4852]: E1201 20:06:05.320101 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:05 crc kubenswrapper[4852]: E1201 20:06:05.320278 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:05 crc kubenswrapper[4852]: E1201 20:06:05.320407 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.334257 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.334340 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.334356 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.334374 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.334388 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:05Z","lastTransitionTime":"2025-12-01T20:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.437208 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.437340 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.437361 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.437394 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.437421 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:05Z","lastTransitionTime":"2025-12-01T20:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.541187 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.541236 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.541247 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.541269 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.541281 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:05Z","lastTransitionTime":"2025-12-01T20:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.644711 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.644774 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.644792 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.644818 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.644837 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:05Z","lastTransitionTime":"2025-12-01T20:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.748277 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.748378 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.748411 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.748445 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.748511 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:05Z","lastTransitionTime":"2025-12-01T20:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.852959 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.853044 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.853099 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.853134 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.853160 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:05Z","lastTransitionTime":"2025-12-01T20:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.956329 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.956423 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.956437 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.956472 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:05 crc kubenswrapper[4852]: I1201 20:06:05.956484 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:05Z","lastTransitionTime":"2025-12-01T20:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.060196 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.060266 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.060279 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.060301 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.060316 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:06Z","lastTransitionTime":"2025-12-01T20:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.163326 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.163377 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.163386 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.163405 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.163416 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:06Z","lastTransitionTime":"2025-12-01T20:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.266340 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.266402 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.266413 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.266432 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.266443 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:06Z","lastTransitionTime":"2025-12-01T20:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.320242 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:06 crc kubenswrapper[4852]: E1201 20:06:06.320585 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.369301 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.369392 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.369418 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.369496 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.369539 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:06Z","lastTransitionTime":"2025-12-01T20:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.472731 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.472831 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.472850 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.472886 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.472915 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:06Z","lastTransitionTime":"2025-12-01T20:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.575840 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.575903 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.575918 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.575936 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.575950 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:06Z","lastTransitionTime":"2025-12-01T20:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.679346 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.679409 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.679426 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.679449 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.679495 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:06Z","lastTransitionTime":"2025-12-01T20:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.782193 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.782270 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.782293 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.782318 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.782338 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:06Z","lastTransitionTime":"2025-12-01T20:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.891115 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.891169 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.891178 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.891192 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.891201 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:06Z","lastTransitionTime":"2025-12-01T20:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.994591 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.994650 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.994667 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.994690 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:06 crc kubenswrapper[4852]: I1201 20:06:06.994707 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:06Z","lastTransitionTime":"2025-12-01T20:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.098077 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.098154 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.098178 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.098208 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.098231 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:07Z","lastTransitionTime":"2025-12-01T20:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.201500 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.201561 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.201578 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.201602 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.201620 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:07Z","lastTransitionTime":"2025-12-01T20:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.304778 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.304867 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.304893 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.304925 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.304950 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:07Z","lastTransitionTime":"2025-12-01T20:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.319764 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.319794 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.319836 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:07 crc kubenswrapper[4852]: E1201 20:06:07.320019 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:07 crc kubenswrapper[4852]: E1201 20:06:07.320115 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:07 crc kubenswrapper[4852]: E1201 20:06:07.320240 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.408151 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.408202 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.408221 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.408243 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.408262 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:07Z","lastTransitionTime":"2025-12-01T20:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.511102 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.511145 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.511156 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.511168 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.511176 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:07Z","lastTransitionTime":"2025-12-01T20:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.614092 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.614133 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.614145 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.614161 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.614173 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:07Z","lastTransitionTime":"2025-12-01T20:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.717566 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.717628 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.717646 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.717669 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.717689 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:07Z","lastTransitionTime":"2025-12-01T20:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.819526 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.819563 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.819572 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.819585 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.819594 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:07Z","lastTransitionTime":"2025-12-01T20:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.921876 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.921930 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.921945 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.921965 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:07 crc kubenswrapper[4852]: I1201 20:06:07.921980 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:07Z","lastTransitionTime":"2025-12-01T20:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.024578 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.024625 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.024644 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.024666 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.024684 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:08Z","lastTransitionTime":"2025-12-01T20:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.127494 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.127543 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.127555 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.127574 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.127591 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:08Z","lastTransitionTime":"2025-12-01T20:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.230384 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.230481 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.230502 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.230527 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.230548 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:08Z","lastTransitionTime":"2025-12-01T20:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.319018 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:08 crc kubenswrapper[4852]: E1201 20:06:08.319195 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.333028 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.333082 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.333100 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.333124 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.333141 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:08Z","lastTransitionTime":"2025-12-01T20:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.436247 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.436331 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.436357 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.436391 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.436417 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:08Z","lastTransitionTime":"2025-12-01T20:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.539540 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.539584 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.539595 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.539611 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.539624 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:08Z","lastTransitionTime":"2025-12-01T20:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.642588 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.642689 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.642722 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.642753 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.642771 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:08Z","lastTransitionTime":"2025-12-01T20:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.745314 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.745380 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.745396 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.745420 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.745443 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:08Z","lastTransitionTime":"2025-12-01T20:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.848810 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.848894 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.848934 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.848969 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.848992 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:08Z","lastTransitionTime":"2025-12-01T20:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.951788 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.951891 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.951914 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.951946 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:08 crc kubenswrapper[4852]: I1201 20:06:08.951963 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:08Z","lastTransitionTime":"2025-12-01T20:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.055526 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.055597 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.055629 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.055659 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.055682 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:09Z","lastTransitionTime":"2025-12-01T20:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.159144 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.159195 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.159212 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.159237 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.159255 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:09Z","lastTransitionTime":"2025-12-01T20:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.262957 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.263006 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.263022 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.263044 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.263062 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:09Z","lastTransitionTime":"2025-12-01T20:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.319791 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.319846 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.320169 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:09 crc kubenswrapper[4852]: E1201 20:06:09.320156 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:09 crc kubenswrapper[4852]: E1201 20:06:09.320334 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:09 crc kubenswrapper[4852]: E1201 20:06:09.320551 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.366143 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.366211 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.366235 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.366264 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.366287 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:09Z","lastTransitionTime":"2025-12-01T20:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.469386 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.469436 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.469492 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.469515 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.469563 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:09Z","lastTransitionTime":"2025-12-01T20:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.572421 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.572513 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.572530 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.572552 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.572581 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:09Z","lastTransitionTime":"2025-12-01T20:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.675819 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.675867 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.675885 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.675908 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.675925 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:09Z","lastTransitionTime":"2025-12-01T20:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.779166 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.779572 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.779732 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.779877 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.780035 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:09Z","lastTransitionTime":"2025-12-01T20:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.883097 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.883168 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.883207 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.883237 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.883260 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:09Z","lastTransitionTime":"2025-12-01T20:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.986096 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.986167 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.986184 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.986214 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:09 crc kubenswrapper[4852]: I1201 20:06:09.986233 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:09Z","lastTransitionTime":"2025-12-01T20:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.089589 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.089665 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.089683 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.089707 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.089725 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:10Z","lastTransitionTime":"2025-12-01T20:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.193336 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.193440 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.193487 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.193511 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.193529 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:10Z","lastTransitionTime":"2025-12-01T20:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.297333 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.297398 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.297415 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.297442 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.297487 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:10Z","lastTransitionTime":"2025-12-01T20:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.319382 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:10 crc kubenswrapper[4852]: E1201 20:06:10.319902 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.320839 4852 scope.go:117] "RemoveContainer" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14" Dec 01 20:06:10 crc kubenswrapper[4852]: E1201 20:06:10.321114 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.338056 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.353379 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.369150 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 
20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.390213 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.400546 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.400634 4852 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.400662 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.400694 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.400716 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:10Z","lastTransitionTime":"2025-12-01T20:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.408758 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.430988 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.450587 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:46Z\\\",\\\"message\\\":\\\"2025-12-01T20:05:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7\\\\n2025-12-01T20:05:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7 to /host/opt/cni/bin/\\\\n2025-12-01T20:05:01Z [verbose] multus-daemon started\\\\n2025-12-01T20:05:01Z [verbose] Readiness Indicator file check\\\\n2025-12-01T20:05:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.482648 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77412ad4396e5938b5d750222fe39b5ddd6e438756948a1fb0226218c057da65\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:26Z\\\",\\\"message\\\":\\\", AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.21\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 20:05:26.340069 6445 services_controller.go:360] Finished syncing service kubernetes on namespace default for network=default : 4.431864ms\\\\nI1201 20:05:26.340083 6445 services_controller.go:356] Processing sync for service openshift-image-registry/image-registry for network=default\\\\nI1201 20:05:26.340101 6445 services_controller.go:356] Processing sync for service openshift-kube-controller-manager/kube-controller-manager for network=default\\\\nF1201 20:05:26.340108 6445 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:57Z\\\",\\\"message\\\":\\\"r.go:444] Built service openshift-console/console LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461825 6816 services_controller.go:445] Built service openshift-console/console LB template configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461825 6816 lb_config.go:1031] Cluster endpoints for openshift-console/downloads for network=default are: map[]\\\\nI1201 20:05:57.461834 6816 services_controller.go:443] Built service openshift-console/downloads LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.213\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:80, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, 
nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1201 20:05:57.461844 6816 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461852 6816 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nF1201 20:05:57.461852 6816 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\
\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.503713 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.504027 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.504053 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.504065 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.504081 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.504089 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:10Z","lastTransitionTime":"2025-12-01T20:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.522255 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.539935 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.563120 4852 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.578507 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4677ab15-06de-4dbe-9ba8-e5b34645c84d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ff2e5b90958523977e4150eafee9e93676c4cadf56821d67af12044d1531e6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://675f8ddf05b0c46a62452e39f19fa0066d40314c0441382b97b835d73bcf712e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://675f8ddf05b0c46a62452e39f19fa0066d40314c0441382b97b835d73bcf712e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.599298 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.607412 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.607478 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.607523 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.607549 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.607569 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:10Z","lastTransitionTime":"2025-12-01T20:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.620762 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.644710 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.659460 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.684408 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b780a86d-16be-45d9-b9b0-52b532f630c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67246c63582a7882b312291c0cf5ed6677e9cfa2008ae0a06a108b5b445a72dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b33bd0aefbe99db696de6ab52300a28393ffd0f4bab41c1b47c0f1b5239f2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26525cc0279c54f152ce1e8e2a10e5f6ba46cb9292d80a6228bad5e2f8c5e0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.707320 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://950b6e127ab8744d33f34ab17c79c3b69eb62775f7b8a0f33c6f116e9e8598d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 
2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.710935 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.710999 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.711017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.711049 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.711069 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:10Z","lastTransitionTime":"2025-12-01T20:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.732751 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.753476 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e823f9e3-954c-4254-9f06-893905a28152\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b5c57c1092df145e61567526391b7053a1448ec47272518402c95567cb63de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csjlv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-j25pb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.773600 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b780a86d-16be-45d9-b9b0-52b532f630c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67246c63582a7882b312291c0cf5ed6677e9cfa2008ae0a06a108b5b445a72dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b33bd0aefbe99db696de6ab52300a28393ffd0f4bab41c1b47c0f1b5239f2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26525cc0279c54f152ce1e8e2a10e5f6ba46cb9292d80a6228bad5e2f8c5e0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbbb3febe4c47898a3570433e84a051d8f72e4b6a39bf5a5fc6729935989820c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.786534 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4677ab15-06de-4dbe-9ba8-e5b34645c84d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ff2e5b90958523977e4150eafee9e93676c4cadf56821d67af12044d1531e6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://675f8ddf05b0c46a62452e39f19fa0066d40314c0441382b97b835d73bcf712e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://675f8ddf05b0c46a62452e39f19fa0066d40314c0441382b97b835d73bcf712e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.797705 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.807969 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.813501 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.813533 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.813543 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.813556 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.813565 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:10Z","lastTransitionTime":"2025-12-01T20:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.824033 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a2eec37-e5b6-45bc-9e83-33be653a5dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59df1914a11127ce40523fa0931b1648dfb98ff93de36c92820678408fa088e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bab9c2b51d1a0e8c12755ea43dd24f18dd6af6e583661a14d22522ed2ee23a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65ae647cd149d0a5609d805487a59f8ac870e4dfbf956b8dfee02de53366f77a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a21e0f559413c5fb23d3f76e970ceafd2db1ec2be0081361283dc0ddc9abc2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33a0386f65b1513ba2220b948a1e1a91498d561345160b6a29ac4bc29f2a74a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0731a380d75e9f2da0e307cff274719f9648de2f61dc6952f4849d88c4375736\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c4fa73c9b14ed0bf372dd35da15dd8b77b79ac825406b7f32506adf8af99fa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lc2s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hjkrm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.835220 4852 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-5kxfk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f53bdc-e742-4661-a6b2-967f6847ade6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef7e3ad1fa7b5fa89a52090b097a5a14e02cfc8bd3eb04c148e0125bea961025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pttcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5kxfk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.846322 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65f7eaa5f392c76478438214823999d7731ea4b53922accb511be5f4960d0b05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8d6b89daaa90ac1c6a6381334055875bbbe51972efdcc9fd2e0798af9d64fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.856335 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82687d3817aa801403d6ae0e752eb2c7c3264a9918161705e5a0e6f234cc3874\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.866686 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dgxbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16363da-fb1f-4f6f-af39-70bf7783f3fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db1404e3e4f871d9522e3b2568b3f6cde1cf78e195a2895adbc34c5962f8169b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lh8kn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dgxbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.878588 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b04aef5d-1916-452d-8706-885e8e52f9ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bf2b48906c0337b849aea0ef469dfd16a760b3ecf336bf35e5baf24d401744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65a900eaefe4c40521f37f9cc4639141f3176b164da29d6f000aeaee13b52bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5m27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:05:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2x6sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 
20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.891389 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37d60ab3-c6bf-4833-a700-7f5bd29f8c41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6530554666a0595ca4a2bda1975886667ad46eea2f2acb0c7047a0b81d39bbb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec02bc6edc62a3f45ff18b3f9b2903d039340c03346cd1b3d12599c6a03b1ff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9177d4e0cf3e2c4e37235aefc15bb74f241b8083e59e68e76095be7a19954a77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.904835 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T20:04:58Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 20:04:52.568173 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 20:04:52.569576 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1036040590/tls.crt::/tmp/serving-cert-1036040590/tls.key\\\\\\\"\\\\nI1201 20:04:58.524169 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 20:04:58.527474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 20:04:58.527520 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 20:04:58.527549 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 20:04:58.527564 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 20:04:58.534019 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 20:04:58.534049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1201 20:04:58.534057 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 20:04:58.534062 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 20:04:58.534066 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 20:04:58.534072 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 20:04:58.534076 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1201 20:04:58.534259 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1201 20:04:58.543444 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:04:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:04:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:04:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc 
kubenswrapper[4852]: I1201 20:06:10.915132 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h7nz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2q4c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.916108 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.916149 4852 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.916166 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.916189 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.916205 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:10Z","lastTransitionTime":"2025-12-01T20:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.928974 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-cjd9b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c477f33-3400-4c50-b2fc-e9306088770e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:46Z\\\",\\\"message\\\":\\\"2025-12-01T20:05:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7\\\\n2025-12-01T20:05:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f666aaea-80f9-45e0-94fd-0e6a37ddc4f7 to /host/opt/cni/bin/\\\\n2025-12-01T20:05:01Z [verbose] multus-daemon started\\\\n2025-12-01T20:05:01Z [verbose] Readiness Indicator file check\\\\n2025-12-01T20:05:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-znznr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-cjd9b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:10 crc kubenswrapper[4852]: I1201 20:06:10.949480 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c6dd12e6-57a6-404a-8138-66e9cfa18d00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T20:04:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T20:05:57Z\\\",\\\"message\\\":\\\"r.go:444] Built service openshift-console/console LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461825 6816 services_controller.go:445] Built service openshift-console/console LB template configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461825 6816 lb_config.go:1031] Cluster endpoints for openshift-console/downloads for network=default are: map[]\\\\nI1201 20:05:57.461834 6816 services_controller.go:443] Built service openshift-console/downloads LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.213\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:80, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1201 20:05:57.461844 6816 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 20:05:57.461852 6816 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nF1201 20:05:57.461852 6816 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9khtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T20:04:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-727gr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:10Z is after 2025-08-24T17:21:41Z" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.019440 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.019481 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.019491 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.019523 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.019537 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:11Z","lastTransitionTime":"2025-12-01T20:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.121559 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.121633 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.121652 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.121678 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.121697 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:11Z","lastTransitionTime":"2025-12-01T20:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.225136 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.225519 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.225715 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.225880 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.226122 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:11Z","lastTransitionTime":"2025-12-01T20:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.319127 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.319168 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.319242 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:06:11 crc kubenswrapper[4852]: E1201 20:06:11.319873 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:11 crc kubenswrapper[4852]: E1201 20:06:11.320050 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:11 crc kubenswrapper[4852]: E1201 20:06:11.319964 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.329486 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.329524 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.329536 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.329555 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.329569 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:11Z","lastTransitionTime":"2025-12-01T20:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.432732 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.432801 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.432826 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.432858 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.432883 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:11Z","lastTransitionTime":"2025-12-01T20:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.535216 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.535265 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.535276 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.535291 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.535302 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:11Z","lastTransitionTime":"2025-12-01T20:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.637789 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.638081 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.638271 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.638424 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.638589 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:11Z","lastTransitionTime":"2025-12-01T20:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.741262 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.741303 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.741315 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.741331 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.741344 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:11Z","lastTransitionTime":"2025-12-01T20:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.844119 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.844161 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.844173 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.844188 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.844199 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:11Z","lastTransitionTime":"2025-12-01T20:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.947071 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.947388 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.947517 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.947623 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:11 crc kubenswrapper[4852]: I1201 20:06:11.947718 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:11Z","lastTransitionTime":"2025-12-01T20:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.051035 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.051120 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.051145 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.051175 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.051197 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:12Z","lastTransitionTime":"2025-12-01T20:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.154878 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.154981 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.155000 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.155023 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.155039 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:12Z","lastTransitionTime":"2025-12-01T20:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.258316 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.258401 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.258423 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.258461 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.258518 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:12Z","lastTransitionTime":"2025-12-01T20:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.319667 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 20:06:12 crc kubenswrapper[4852]: E1201 20:06:12.319915 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.361089 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.361150 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.361170 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.361193 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.361212 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:12Z","lastTransitionTime":"2025-12-01T20:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.464077 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.464138 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.464156 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.464182 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.464201 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:12Z","lastTransitionTime":"2025-12-01T20:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.567329 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.567430 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.567480 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.567511 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.567533 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:12Z","lastTransitionTime":"2025-12-01T20:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.670643 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.670734 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.670758 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.670789 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.670814 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:12Z","lastTransitionTime":"2025-12-01T20:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.773999 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.774054 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.774071 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.774099 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.774116 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:12Z","lastTransitionTime":"2025-12-01T20:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.876530 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.876607 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.876650 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.876682 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.876706 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:12Z","lastTransitionTime":"2025-12-01T20:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.979325 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.979372 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.979384 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.979403 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:12 crc kubenswrapper[4852]: I1201 20:06:12.979445 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:12Z","lastTransitionTime":"2025-12-01T20:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.082710 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.082765 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.082782 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.082804 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.082824 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.185605 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.185665 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.185683 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.185707 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.185723 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.288931 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.289067 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.289087 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.289110 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.289129 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.318953 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.319011 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.319052 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:06:13 crc kubenswrapper[4852]: E1201 20:06:13.319143 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:13 crc kubenswrapper[4852]: E1201 20:06:13.319372 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:13 crc kubenswrapper[4852]: E1201 20:06:13.319438 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.365049 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.366766 4852 scope.go:117] "RemoveContainer" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14" Dec 01 20:06:13 crc kubenswrapper[4852]: E1201 20:06:13.367091 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.392265 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.392317 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.392343 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.392375 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.392398 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.495131 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.495200 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.495219 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.495254 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.495279 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.598734 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.598805 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.598825 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.598850 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.598868 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.701682 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.701738 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.701755 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.701775 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.701792 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.804437 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.804489 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.804497 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.804512 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.804521 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.908107 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.908142 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.908153 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.908167 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.908177 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.924288 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.924311 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.924318 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.924327 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.924335 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: E1201 20:06:13.943169 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:13Z is after 
2025-08-24T17:21:41Z" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.948002 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.948062 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.948080 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.948104 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.948122 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:13 crc kubenswrapper[4852]: E1201 20:06:13.968133 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T20:06:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f1d127a-f97f-4747-ace7-8885db8f5b08\\\",\\\"systemUUID\\\":\\\"82eb6500-1744-4a7f-824c-21d40af3b228\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:13Z is after 
2025-08-24T17:21:41Z"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.972777 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.972880 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.972910 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.972944 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:13 crc kubenswrapper[4852]: I1201 20:06:13.972967 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:13Z","lastTransitionTime":"2025-12-01T20:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:13 crc kubenswrapper[4852]: E1201 20:06:13.995365 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{ ...node status payload elided; repeats the attempt above verbatim apart from the condition heartbeat timestamps... }\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:13Z is after 2025-08-24T17:21:41Z"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.001492 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.001543 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.001561 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.001585 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.001605 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:14 crc kubenswrapper[4852]: E1201 20:06:14.026243 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{ ...node status payload elided; repeats the attempt above verbatim apart from the condition heartbeat timestamps... }\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:14Z is after 2025-08-24T17:21:41Z"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.032803 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.032860 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.032878 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.032906 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.032924 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:14 crc kubenswrapper[4852]: E1201 20:06:14.055964 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{ ...node status payload elided; repeats the attempt above verbatim apart from the condition heartbeat timestamps... }\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T20:06:14Z is after 2025-08-24T17:21:41Z"
Dec 01 20:06:14 crc kubenswrapper[4852]: E1201 20:06:14.056198 4852 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.058552 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.058605 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.058624 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.058650 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.058670 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.162062 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.162110 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.162122 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.162139 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.162151 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.265742 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.265801 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.265818 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.265843 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.265861 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.319721 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 20:06:14 crc kubenswrapper[4852]: E1201 20:06:14.319930 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.368975 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.369078 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.369101 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.369123 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.369139 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.471951 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.472007 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.472025 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.472049 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.472068 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.575836 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.575893 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.575911 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.575935 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.575952 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.679286 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.679342 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.679359 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.679383 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.679400 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.782531 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.782595 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.782615 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.782642 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.782663 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.885188 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.885267 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.885292 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.885318 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.885335 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.988119 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.988179 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.988196 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.988220 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:14 crc kubenswrapper[4852]: I1201 20:06:14.988239 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:14Z","lastTransitionTime":"2025-12-01T20:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.091922 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.091986 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.092006 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.092033 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.092051 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:15Z","lastTransitionTime":"2025-12-01T20:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.194775 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.194815 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.194824 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.194840 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.194850 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:15Z","lastTransitionTime":"2025-12-01T20:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.298043 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.298106 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.298122 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.298151 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.298171 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:15Z","lastTransitionTime":"2025-12-01T20:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.319116 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.319244 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.319113 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:15 crc kubenswrapper[4852]: E1201 20:06:15.319315 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:15 crc kubenswrapper[4852]: E1201 20:06:15.319407 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:15 crc kubenswrapper[4852]: E1201 20:06:15.319605 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.401840 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.401935 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.401954 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.402017 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.402037 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:15Z","lastTransitionTime":"2025-12-01T20:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.504437 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.504538 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.504555 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.504580 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.504598 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:15Z","lastTransitionTime":"2025-12-01T20:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.607054 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.607171 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.607242 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.607277 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.607341 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:15Z","lastTransitionTime":"2025-12-01T20:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.709874 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.709934 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.709951 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.709974 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.709992 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:15Z","lastTransitionTime":"2025-12-01T20:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.812739 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.812808 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.812821 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.812837 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.812849 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:15Z","lastTransitionTime":"2025-12-01T20:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.915562 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.915592 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.915601 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.915615 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:15 crc kubenswrapper[4852]: I1201 20:06:15.915626 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:15Z","lastTransitionTime":"2025-12-01T20:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.018086 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.018117 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.018125 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.018137 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.018145 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:16Z","lastTransitionTime":"2025-12-01T20:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.120776 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.120829 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.120845 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.120866 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.120885 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:16Z","lastTransitionTime":"2025-12-01T20:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.222736 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.222770 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.222797 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.222862 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.222871 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:16Z","lastTransitionTime":"2025-12-01T20:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.319597 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:16 crc kubenswrapper[4852]: E1201 20:06:16.319784 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.325822 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.325863 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.325874 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.325890 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.325902 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:16Z","lastTransitionTime":"2025-12-01T20:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.428325 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.428395 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.428412 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.428439 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.428499 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:16Z","lastTransitionTime":"2025-12-01T20:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.531721 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.531774 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.531791 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.531814 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.531831 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:16Z","lastTransitionTime":"2025-12-01T20:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.635207 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.635279 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.635302 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.635333 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.635355 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:16Z","lastTransitionTime":"2025-12-01T20:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.738848 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.738914 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.738937 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.738965 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.738987 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:16Z","lastTransitionTime":"2025-12-01T20:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.842125 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.842208 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.842236 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.842267 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.842288 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:16Z","lastTransitionTime":"2025-12-01T20:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.946073 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.946140 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.946158 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.946182 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:16 crc kubenswrapper[4852]: I1201 20:06:16.946203 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:16Z","lastTransitionTime":"2025-12-01T20:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.049087 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.049158 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.049174 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.049222 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.049240 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:17Z","lastTransitionTime":"2025-12-01T20:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.153200 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.153300 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.153329 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.153364 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.153388 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:17Z","lastTransitionTime":"2025-12-01T20:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.258123 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.258224 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.258251 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.258283 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.258307 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:17Z","lastTransitionTime":"2025-12-01T20:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.319915 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.320052 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.320119 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:17 crc kubenswrapper[4852]: E1201 20:06:17.320507 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:17 crc kubenswrapper[4852]: E1201 20:06:17.320770 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:17 crc kubenswrapper[4852]: E1201 20:06:17.320873 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.361399 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.361473 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.361491 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.361516 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.361530 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:17Z","lastTransitionTime":"2025-12-01T20:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.465113 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.465176 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.465188 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.465208 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.465223 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:17Z","lastTransitionTime":"2025-12-01T20:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.569569 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.569675 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.569695 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.569718 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.569733 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:17Z","lastTransitionTime":"2025-12-01T20:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.673093 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.673169 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.673189 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.673218 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.673243 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:17Z","lastTransitionTime":"2025-12-01T20:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.775980 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.776051 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.776065 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.776087 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.776102 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:17Z","lastTransitionTime":"2025-12-01T20:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.879126 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.879193 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.879210 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.879236 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.879257 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:17Z","lastTransitionTime":"2025-12-01T20:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.983043 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.983129 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.983152 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.983183 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:17 crc kubenswrapper[4852]: I1201 20:06:17.983204 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:17Z","lastTransitionTime":"2025-12-01T20:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.087231 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.087317 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.087344 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.087380 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.087404 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:18Z","lastTransitionTime":"2025-12-01T20:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.190639 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.190710 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.190729 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.190754 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.190771 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:18Z","lastTransitionTime":"2025-12-01T20:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.294070 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.294146 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.294167 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.294198 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.294218 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:18Z","lastTransitionTime":"2025-12-01T20:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.319644 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:18 crc kubenswrapper[4852]: E1201 20:06:18.319862 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.397515 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.397598 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.397622 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.397655 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.397679 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:18Z","lastTransitionTime":"2025-12-01T20:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.501099 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.501178 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.501212 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.501240 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.501260 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:18Z","lastTransitionTime":"2025-12-01T20:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.604110 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.604202 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.604234 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.604268 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.604292 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:18Z","lastTransitionTime":"2025-12-01T20:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.707561 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.707655 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.707681 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.707717 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.707744 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:18Z","lastTransitionTime":"2025-12-01T20:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.810851 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.810971 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.810998 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.811033 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.811053 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:18Z","lastTransitionTime":"2025-12-01T20:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.915508 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.915592 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.915616 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.915648 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:18 crc kubenswrapper[4852]: I1201 20:06:18.915672 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:18Z","lastTransitionTime":"2025-12-01T20:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.019948 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.020043 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.020095 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.020133 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.020161 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:19Z","lastTransitionTime":"2025-12-01T20:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.123985 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.124054 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.124074 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.124103 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.124124 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:19Z","lastTransitionTime":"2025-12-01T20:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.227970 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.228035 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.228054 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.228078 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.228097 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:19Z","lastTransitionTime":"2025-12-01T20:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.319656 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.319713 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.319672 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:19 crc kubenswrapper[4852]: E1201 20:06:19.319899 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:19 crc kubenswrapper[4852]: E1201 20:06:19.319980 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:19 crc kubenswrapper[4852]: E1201 20:06:19.320136 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.331220 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.331291 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.331300 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.331317 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.331328 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:19Z","lastTransitionTime":"2025-12-01T20:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.435150 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.435216 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.435236 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.435263 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.435284 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:19Z","lastTransitionTime":"2025-12-01T20:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.538989 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.539067 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.539085 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.539128 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.539148 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:19Z","lastTransitionTime":"2025-12-01T20:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.643743 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.643832 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.643859 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.643894 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.643921 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:19Z","lastTransitionTime":"2025-12-01T20:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.748151 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.748216 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.748236 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.748261 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.748279 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:19Z","lastTransitionTime":"2025-12-01T20:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.854442 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.854846 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.855055 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.855247 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.855398 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:19Z","lastTransitionTime":"2025-12-01T20:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.959390 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.959502 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.959523 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.959555 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:19 crc kubenswrapper[4852]: I1201 20:06:19.959577 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:19Z","lastTransitionTime":"2025-12-01T20:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.062897 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.062971 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.062992 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.063022 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.063044 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:20Z","lastTransitionTime":"2025-12-01T20:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.166935 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.166996 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.167013 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.167039 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.167057 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:20Z","lastTransitionTime":"2025-12-01T20:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.271433 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.271570 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.271599 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.271633 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.271656 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:20Z","lastTransitionTime":"2025-12-01T20:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.319516 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:20 crc kubenswrapper[4852]: E1201 20:06:20.319781 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.375041 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.375111 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.375130 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.375164 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.375184 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:20Z","lastTransitionTime":"2025-12-01T20:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.376167 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2x6sb" podStartSLOduration=80.376130435 podStartE2EDuration="1m20.376130435s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:20.351780043 +0000 UTC m=+100.278861520" watchObservedRunningTime="2025-12-01 20:06:20.376130435 +0000 UTC m=+100.303211892" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.416238 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-dgxbk" podStartSLOduration=81.416195719 podStartE2EDuration="1m21.416195719s" podCreationTimestamp="2025-12-01 20:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:20.41593885 +0000 UTC m=+100.343020277" watchObservedRunningTime="2025-12-01 20:06:20.416195719 +0000 UTC m=+100.343277186" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.478642 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.478745 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.478763 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.478793 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.478812 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:20Z","lastTransitionTime":"2025-12-01T20:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.505559 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-cjd9b" podStartSLOduration=81.505531214 podStartE2EDuration="1m21.505531214s" podCreationTimestamp="2025-12-01 20:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:20.455849839 +0000 UTC m=+100.382931266" watchObservedRunningTime="2025-12-01 20:06:20.505531214 +0000 UTC m=+100.432612641" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.528094 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=81.528068609 podStartE2EDuration="1m21.528068609s" podCreationTimestamp="2025-12-01 20:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:20.527175701 +0000 UTC m=+100.454257138" watchObservedRunningTime="2025-12-01 20:06:20.528068609 +0000 UTC m=+100.455150046" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.544551 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=81.544514603 podStartE2EDuration="1m21.544514603s" podCreationTimestamp="2025-12-01 20:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:20.544163702 +0000 UTC m=+100.471245149" watchObservedRunningTime="2025-12-01 20:06:20.544514603 +0000 UTC m=+100.471596030" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.581977 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.582039 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.582056 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.582077 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.582092 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:20Z","lastTransitionTime":"2025-12-01T20:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.604223 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podStartSLOduration=81.604188901 podStartE2EDuration="1m21.604188901s" podCreationTimestamp="2025-12-01 20:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:20.603800148 +0000 UTC m=+100.530881595" watchObservedRunningTime="2025-12-01 20:06:20.604188901 +0000 UTC m=+100.531270328" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.669742 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-5kxfk" podStartSLOduration=80.6697005 podStartE2EDuration="1m20.6697005s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:20.669170244 +0000 UTC m=+100.596251681" watchObservedRunningTime="2025-12-01 20:06:20.6697005 +0000 UTC m=+100.596781937" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.670056 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-hjkrm" podStartSLOduration=81.670048551 podStartE2EDuration="1m21.670048551s" podCreationTimestamp="2025-12-01 20:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:20.642704665 +0000 UTC m=+100.569786092" watchObservedRunningTime="2025-12-01 20:06:20.670048551 +0000 UTC m=+100.597129988" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.685339 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.685825 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.685918 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.686008 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.686086 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:20Z","lastTransitionTime":"2025-12-01T20:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.697565 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=48.697539762 podStartE2EDuration="48.697539762s" podCreationTimestamp="2025-12-01 20:05:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:20.697115628 +0000 UTC m=+100.624197065" watchObservedRunningTime="2025-12-01 20:06:20.697539762 +0000 UTC m=+100.624621179"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.734026 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=29.733996442 podStartE2EDuration="29.733996442s" podCreationTimestamp="2025-12-01 20:05:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:20.714546293 +0000 UTC m=+100.641627710" watchObservedRunningTime="2025-12-01 20:06:20.733996442 +0000 UTC m=+100.661077849"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.788833 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.788889 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.788910 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.788934 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.788953 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:20Z","lastTransitionTime":"2025-12-01T20:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.891690 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.891739 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.891771 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.891793 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.891809 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:20Z","lastTransitionTime":"2025-12-01T20:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
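[Editor's note] The pod_startup_latency_tracker entries above all carry zero-valued pull timestamps ("0001-01-01 00:00:00 +0000 UTC"); in that case the logged podStartSLOduration and podStartE2EDuration both reduce to watchObservedRunningTime minus podCreationTimestamp. A minimal Go sketch (a hypothetical reconstruction for illustration, not the tracker's own code) reproducing the kube-rbac-proxy-crio-crc figure:

// slo_duration.go - worked example of the duration arithmetic above.
package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	// Values copied from the kube-rbac-proxy-crio-crc log entry above.
	created, err := time.Parse(layout, "2025-12-01 20:05:51 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2025-12-01 20:06:20.733996442 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 29.733996442s, matching podStartSLOduration in the log.
	fmt.Println(observed.Sub(created))
}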
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.995400 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.995491 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.995517 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.995542 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:20 crc kubenswrapper[4852]: I1201 20:06:20.995558 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:20Z","lastTransitionTime":"2025-12-01T20:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.098951 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.099022 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.099038 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.099060 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.099078 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:21Z","lastTransitionTime":"2025-12-01T20:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.202876 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.202977 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.202997 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.203029 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.203056 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:21Z","lastTransitionTime":"2025-12-01T20:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.307106 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.307167 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.307181 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.307202 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.307217 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:21Z","lastTransitionTime":"2025-12-01T20:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.319225 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.319361 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:06:21 crc kubenswrapper[4852]: E1201 20:06:21.319431 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.319361 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:06:21 crc kubenswrapper[4852]: E1201 20:06:21.319644 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:06:21 crc kubenswrapper[4852]: E1201 20:06:21.320188 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.344852 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.410806 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.410856 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.410868 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.410886 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.410900 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:21Z","lastTransitionTime":"2025-12-01T20:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.514774 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.514850 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.514866 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.514901 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.514921 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:21Z","lastTransitionTime":"2025-12-01T20:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.618136 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.618180 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.618190 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.618206 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.618220 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:21Z","lastTransitionTime":"2025-12-01T20:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.721486 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.721561 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.721573 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.721596 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.721613 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:21Z","lastTransitionTime":"2025-12-01T20:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.824702 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.824765 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.824784 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.824805 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.824817 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:21Z","lastTransitionTime":"2025-12-01T20:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.928499 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.928557 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.928570 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.928591 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:21 crc kubenswrapper[4852]: I1201 20:06:21.928606 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:21Z","lastTransitionTime":"2025-12-01T20:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.032596 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.032672 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.032691 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.032730 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.032751 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:22Z","lastTransitionTime":"2025-12-01T20:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.136249 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.136330 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.136348 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.136374 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.136391 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:22Z","lastTransitionTime":"2025-12-01T20:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
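[Editor's note] Every NodeNotReady / "Node became not ready" cycle in this section traces back to one condition: no CNI configuration file under /etc/kubernetes/cni/net.d/ (on a CRC/OpenShift node that file is normally written once the cluster network plugin, here Multus/OVN-Kubernetes, comes up). A standalone Go sketch of that readiness test follows; this is an illustrative approximation only, since the real check runs inside the container runtime's CNI layer, not as a separate binary:

// cni_ready_check.go - minimal sketch of the readiness test failing above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log message
	// Look for any CNI network definition; .conf, .conflist and .json are
	// the extensions commonly accepted by CNI config loaders (assumption).
	var found []string
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pattern))
		if err != nil {
			fmt.Fprintln(os.Stderr, "bad pattern:", err)
			os.Exit(2)
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		// This is the state the kubelet keeps reporting above.
		fmt.Printf("network not ready: no CNI configuration file in %s\n", confDir)
		os.Exit(1)
	}
	fmt.Println("network ready: found", found)
}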
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.239284 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.239336 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.239352 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.239377 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.239393 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:22Z","lastTransitionTime":"2025-12-01T20:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.320055 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 20:06:22 crc kubenswrapper[4852]: E1201 20:06:22.320270 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.342989 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.343066 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.343090 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.343122 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.343144 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:22Z","lastTransitionTime":"2025-12-01T20:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.446967 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.447049 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.447069 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.447103 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.447126 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:22Z","lastTransitionTime":"2025-12-01T20:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.550445 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.550573 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.550589 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.550612 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.550632 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:22Z","lastTransitionTime":"2025-12-01T20:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.653718 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.653788 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.653813 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.653856 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.653880 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:22Z","lastTransitionTime":"2025-12-01T20:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.757116 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.757180 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.757198 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.757221 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.757242 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:22Z","lastTransitionTime":"2025-12-01T20:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.860929 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.861021 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.861046 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.861082 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.861113 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:22Z","lastTransitionTime":"2025-12-01T20:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.965018 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.965089 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.965106 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.965132 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:22 crc kubenswrapper[4852]: I1201 20:06:22.965150 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:22Z","lastTransitionTime":"2025-12-01T20:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.068557 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.068610 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.068626 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.068650 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.068666 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:23Z","lastTransitionTime":"2025-12-01T20:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.172118 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.172198 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.172235 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.172268 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.172290 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:23Z","lastTransitionTime":"2025-12-01T20:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.275080 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.275147 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.275165 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.275191 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.275207 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:23Z","lastTransitionTime":"2025-12-01T20:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.319499 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.319557 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.319589 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c"
Dec 01 20:06:23 crc kubenswrapper[4852]: E1201 20:06:23.319729 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 20:06:23 crc kubenswrapper[4852]: E1201 20:06:23.319865 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 20:06:23 crc kubenswrapper[4852]: E1201 20:06:23.320018 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.378427 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.378506 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.378524 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.378545 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.378563 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:23Z","lastTransitionTime":"2025-12-01T20:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.481700 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.481769 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.481788 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.482004 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.482022 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:23Z","lastTransitionTime":"2025-12-01T20:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.584406 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.584514 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.584539 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.584572 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.584594 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:23Z","lastTransitionTime":"2025-12-01T20:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.687994 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.688077 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.688104 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.688134 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.688156 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:23Z","lastTransitionTime":"2025-12-01T20:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.790916 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.791005 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.791030 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.791061 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.791082 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:23Z","lastTransitionTime":"2025-12-01T20:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.894291 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.894337 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.894349 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.894365 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.894377 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:23Z","lastTransitionTime":"2025-12-01T20:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.997971 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.998045 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.998065 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.998090 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 20:06:23 crc kubenswrapper[4852]: I1201 20:06:23.998109 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:23Z","lastTransitionTime":"2025-12-01T20:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.100383 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.100517 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.100540 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.100602 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.100630 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:24Z","lastTransitionTime":"2025-12-01T20:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.203117 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.203176 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.203198 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.203226 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.203247 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:24Z","lastTransitionTime":"2025-12-01T20:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.306641 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.306709 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.306732 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.306762 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.306783 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:24Z","lastTransitionTime":"2025-12-01T20:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.319789 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:24 crc kubenswrapper[4852]: E1201 20:06:24.319987 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.409832 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.409894 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.409912 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.409937 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.409955 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:24Z","lastTransitionTime":"2025-12-01T20:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.447902 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.447992 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.448007 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.448027 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.448048 4852 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T20:06:24Z","lastTransitionTime":"2025-12-01T20:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.513312 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s"] Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.513918 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.516480 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.516968 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.516981 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.517114 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.572896 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=3.57286214 podStartE2EDuration="3.57286214s" podCreationTimestamp="2025-12-01 20:06:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:24.57092543 +0000 UTC m=+104.498006867" watchObservedRunningTime="2025-12-01 20:06:24.57286214 +0000 UTC m=+104.499943597" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.682267 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e45a6a5d-c725-4690-9431-d8a4fdfa7418-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.682349 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e45a6a5d-c725-4690-9431-d8a4fdfa7418-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.682399 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e45a6a5d-c725-4690-9431-d8a4fdfa7418-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.682504 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e45a6a5d-c725-4690-9431-d8a4fdfa7418-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.682594 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e45a6a5d-c725-4690-9431-d8a4fdfa7418-service-ca\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: 
\"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.784336 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e45a6a5d-c725-4690-9431-d8a4fdfa7418-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.784404 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e45a6a5d-c725-4690-9431-d8a4fdfa7418-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.784442 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e45a6a5d-c725-4690-9431-d8a4fdfa7418-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.784510 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e45a6a5d-c725-4690-9431-d8a4fdfa7418-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.784568 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e45a6a5d-c725-4690-9431-d8a4fdfa7418-service-ca\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.785619 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e45a6a5d-c725-4690-9431-d8a4fdfa7418-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.784591 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e45a6a5d-c725-4690-9431-d8a4fdfa7418-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.787601 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e45a6a5d-c725-4690-9431-d8a4fdfa7418-service-ca\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 
01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.794800 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e45a6a5d-c725-4690-9431-d8a4fdfa7418-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.816524 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e45a6a5d-c725-4690-9431-d8a4fdfa7418-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-gvs8s\" (UID: \"e45a6a5d-c725-4690-9431-d8a4fdfa7418\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.839500 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" Dec 01 20:06:24 crc kubenswrapper[4852]: I1201 20:06:24.881580 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" event={"ID":"e45a6a5d-c725-4690-9431-d8a4fdfa7418","Type":"ContainerStarted","Data":"9a84e91ed3d40d081a4981889eb2cddb18d30ca0ee5ea506e946a303a231fe53"} Dec 01 20:06:25 crc kubenswrapper[4852]: I1201 20:06:25.319533 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:25 crc kubenswrapper[4852]: I1201 20:06:25.319571 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:25 crc kubenswrapper[4852]: I1201 20:06:25.319647 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:25 crc kubenswrapper[4852]: E1201 20:06:25.319839 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:25 crc kubenswrapper[4852]: E1201 20:06:25.320006 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:25 crc kubenswrapper[4852]: E1201 20:06:25.320139 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:25 crc kubenswrapper[4852]: I1201 20:06:25.887699 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" event={"ID":"e45a6a5d-c725-4690-9431-d8a4fdfa7418","Type":"ContainerStarted","Data":"b61d616e09ace6226e4d49b25f2f143e5558fc6135db71b76a634d4a3cb331da"} Dec 01 20:06:26 crc kubenswrapper[4852]: I1201 20:06:26.319653 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:26 crc kubenswrapper[4852]: E1201 20:06:26.319903 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:27 crc kubenswrapper[4852]: I1201 20:06:27.319844 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:27 crc kubenswrapper[4852]: I1201 20:06:27.319885 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:27 crc kubenswrapper[4852]: I1201 20:06:27.319856 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:27 crc kubenswrapper[4852]: E1201 20:06:27.320028 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:27 crc kubenswrapper[4852]: E1201 20:06:27.320149 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:27 crc kubenswrapper[4852]: E1201 20:06:27.320274 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:28 crc kubenswrapper[4852]: I1201 20:06:28.319412 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:28 crc kubenswrapper[4852]: E1201 20:06:28.319989 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:28 crc kubenswrapper[4852]: I1201 20:06:28.320511 4852 scope.go:117] "RemoveContainer" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14" Dec 01 20:06:28 crc kubenswrapper[4852]: E1201 20:06:28.320773 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-727gr_openshift-ovn-kubernetes(c6dd12e6-57a6-404a-8138-66e9cfa18d00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" Dec 01 20:06:29 crc kubenswrapper[4852]: I1201 20:06:29.319530 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:29 crc kubenswrapper[4852]: I1201 20:06:29.319611 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:29 crc kubenswrapper[4852]: I1201 20:06:29.319676 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:29 crc kubenswrapper[4852]: E1201 20:06:29.319787 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:29 crc kubenswrapper[4852]: E1201 20:06:29.319953 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:29 crc kubenswrapper[4852]: E1201 20:06:29.320080 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:30 crc kubenswrapper[4852]: I1201 20:06:30.319714 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:30 crc kubenswrapper[4852]: E1201 20:06:30.321089 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:31 crc kubenswrapper[4852]: I1201 20:06:31.319798 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:31 crc kubenswrapper[4852]: E1201 20:06:31.319991 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:31 crc kubenswrapper[4852]: I1201 20:06:31.320110 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:31 crc kubenswrapper[4852]: E1201 20:06:31.320223 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:31 crc kubenswrapper[4852]: I1201 20:06:31.320305 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:31 crc kubenswrapper[4852]: E1201 20:06:31.320400 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:32 crc kubenswrapper[4852]: I1201 20:06:32.320032 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:32 crc kubenswrapper[4852]: E1201 20:06:32.320226 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:32 crc kubenswrapper[4852]: I1201 20:06:32.911237 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-cjd9b_6c477f33-3400-4c50-b2fc-e9306088770e/kube-multus/1.log" Dec 01 20:06:32 crc kubenswrapper[4852]: I1201 20:06:32.912501 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-cjd9b_6c477f33-3400-4c50-b2fc-e9306088770e/kube-multus/0.log" Dec 01 20:06:32 crc kubenswrapper[4852]: I1201 20:06:32.912577 4852 generic.go:334] "Generic (PLEG): container finished" podID="6c477f33-3400-4c50-b2fc-e9306088770e" containerID="664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f" exitCode=1 Dec 01 20:06:32 crc kubenswrapper[4852]: I1201 20:06:32.912632 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-cjd9b" event={"ID":"6c477f33-3400-4c50-b2fc-e9306088770e","Type":"ContainerDied","Data":"664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f"} Dec 01 20:06:32 crc kubenswrapper[4852]: I1201 20:06:32.912692 4852 scope.go:117] "RemoveContainer" containerID="67edbf7021239b4a4a22cdb9d5c323d50308a6ec3b01537ec7ded2126b823293" Dec 01 20:06:32 crc kubenswrapper[4852]: I1201 20:06:32.913192 4852 scope.go:117] "RemoveContainer" containerID="664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f" Dec 01 20:06:32 crc kubenswrapper[4852]: E1201 20:06:32.913543 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-cjd9b_openshift-multus(6c477f33-3400-4c50-b2fc-e9306088770e)\"" pod="openshift-multus/multus-cjd9b" podUID="6c477f33-3400-4c50-b2fc-e9306088770e" Dec 01 20:06:32 crc kubenswrapper[4852]: I1201 20:06:32.943032 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-gvs8s" podStartSLOduration=92.94300919 podStartE2EDuration="1m32.94300919s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:25.903662728 +0000 UTC m=+105.830744185" watchObservedRunningTime="2025-12-01 20:06:32.94300919 +0000 UTC m=+112.870090637" Dec 01 20:06:33 crc kubenswrapper[4852]: I1201 20:06:33.318993 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:33 crc kubenswrapper[4852]: I1201 20:06:33.319032 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:33 crc kubenswrapper[4852]: I1201 20:06:33.319017 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:33 crc kubenswrapper[4852]: E1201 20:06:33.319111 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:33 crc kubenswrapper[4852]: E1201 20:06:33.319269 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:33 crc kubenswrapper[4852]: E1201 20:06:33.319359 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:33 crc kubenswrapper[4852]: I1201 20:06:33.918375 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-cjd9b_6c477f33-3400-4c50-b2fc-e9306088770e/kube-multus/1.log" Dec 01 20:06:34 crc kubenswrapper[4852]: I1201 20:06:34.319430 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:34 crc kubenswrapper[4852]: E1201 20:06:34.319600 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:35 crc kubenswrapper[4852]: I1201 20:06:35.319430 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:35 crc kubenswrapper[4852]: I1201 20:06:35.319490 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:35 crc kubenswrapper[4852]: E1201 20:06:35.319715 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:35 crc kubenswrapper[4852]: I1201 20:06:35.319786 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:35 crc kubenswrapper[4852]: E1201 20:06:35.319944 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:35 crc kubenswrapper[4852]: E1201 20:06:35.320053 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:36 crc kubenswrapper[4852]: I1201 20:06:36.319989 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:36 crc kubenswrapper[4852]: E1201 20:06:36.320207 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:37 crc kubenswrapper[4852]: I1201 20:06:37.319774 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:37 crc kubenswrapper[4852]: I1201 20:06:37.319879 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:37 crc kubenswrapper[4852]: I1201 20:06:37.319806 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:37 crc kubenswrapper[4852]: E1201 20:06:37.319982 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:37 crc kubenswrapper[4852]: E1201 20:06:37.320180 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:37 crc kubenswrapper[4852]: E1201 20:06:37.320327 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:38 crc kubenswrapper[4852]: I1201 20:06:38.319032 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:38 crc kubenswrapper[4852]: E1201 20:06:38.319293 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:39 crc kubenswrapper[4852]: I1201 20:06:39.319275 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:39 crc kubenswrapper[4852]: I1201 20:06:39.319296 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:39 crc kubenswrapper[4852]: E1201 20:06:39.319491 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:39 crc kubenswrapper[4852]: I1201 20:06:39.319276 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:39 crc kubenswrapper[4852]: E1201 20:06:39.319797 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:39 crc kubenswrapper[4852]: E1201 20:06:39.319968 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:40 crc kubenswrapper[4852]: E1201 20:06:40.296003 4852 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 01 20:06:40 crc kubenswrapper[4852]: I1201 20:06:40.319746 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:40 crc kubenswrapper[4852]: E1201 20:06:40.322022 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:40 crc kubenswrapper[4852]: E1201 20:06:40.437734 4852 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 20:06:41 crc kubenswrapper[4852]: I1201 20:06:41.319768 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:41 crc kubenswrapper[4852]: I1201 20:06:41.319776 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:41 crc kubenswrapper[4852]: E1201 20:06:41.320329 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:41 crc kubenswrapper[4852]: I1201 20:06:41.320389 4852 scope.go:117] "RemoveContainer" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14" Dec 01 20:06:41 crc kubenswrapper[4852]: E1201 20:06:41.320501 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:41 crc kubenswrapper[4852]: I1201 20:06:41.319823 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:41 crc kubenswrapper[4852]: E1201 20:06:41.320660 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:41 crc kubenswrapper[4852]: I1201 20:06:41.953027 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/3.log" Dec 01 20:06:41 crc kubenswrapper[4852]: I1201 20:06:41.957530 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerStarted","Data":"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"} Dec 01 20:06:41 crc kubenswrapper[4852]: I1201 20:06:41.957970 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" Dec 01 20:06:41 crc kubenswrapper[4852]: I1201 20:06:41.984519 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podStartSLOduration=102.984445663 podStartE2EDuration="1m42.984445663s" podCreationTimestamp="2025-12-01 20:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:41.981668116 +0000 UTC m=+121.908749553" watchObservedRunningTime="2025-12-01 20:06:41.984445663 +0000 UTC m=+121.911527130" Dec 01 20:06:42 crc kubenswrapper[4852]: I1201 20:06:42.200308 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-j2q4c"] Dec 01 20:06:42 crc kubenswrapper[4852]: I1201 20:06:42.201049 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:42 crc kubenswrapper[4852]: E1201 20:06:42.201194 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:42 crc kubenswrapper[4852]: I1201 20:06:42.319333 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:42 crc kubenswrapper[4852]: E1201 20:06:42.319492 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:43 crc kubenswrapper[4852]: I1201 20:06:43.319931 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:43 crc kubenswrapper[4852]: I1201 20:06:43.319931 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:43 crc kubenswrapper[4852]: E1201 20:06:43.320151 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:43 crc kubenswrapper[4852]: E1201 20:06:43.320312 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:44 crc kubenswrapper[4852]: I1201 20:06:44.320057 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:44 crc kubenswrapper[4852]: I1201 20:06:44.320161 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:44 crc kubenswrapper[4852]: E1201 20:06:44.320310 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:44 crc kubenswrapper[4852]: E1201 20:06:44.320645 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:45 crc kubenswrapper[4852]: I1201 20:06:45.319837 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:45 crc kubenswrapper[4852]: I1201 20:06:45.319864 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:45 crc kubenswrapper[4852]: E1201 20:06:45.319987 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:45 crc kubenswrapper[4852]: E1201 20:06:45.320034 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:45 crc kubenswrapper[4852]: E1201 20:06:45.439410 4852 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 20:06:46 crc kubenswrapper[4852]: I1201 20:06:46.319747 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:46 crc kubenswrapper[4852]: I1201 20:06:46.319757 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:46 crc kubenswrapper[4852]: E1201 20:06:46.320543 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:46 crc kubenswrapper[4852]: E1201 20:06:46.320268 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:46 crc kubenswrapper[4852]: I1201 20:06:46.320732 4852 scope.go:117] "RemoveContainer" containerID="664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f" Dec 01 20:06:46 crc kubenswrapper[4852]: I1201 20:06:46.982674 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-cjd9b_6c477f33-3400-4c50-b2fc-e9306088770e/kube-multus/1.log" Dec 01 20:06:46 crc kubenswrapper[4852]: I1201 20:06:46.983111 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-cjd9b" event={"ID":"6c477f33-3400-4c50-b2fc-e9306088770e","Type":"ContainerStarted","Data":"877e7e734e43e56c8ff8e3592b48b6991090539bf87c13bbb051096b9e73f1ec"} Dec 01 20:06:47 crc kubenswrapper[4852]: I1201 20:06:47.319699 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:47 crc kubenswrapper[4852]: I1201 20:06:47.319699 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:47 crc kubenswrapper[4852]: E1201 20:06:47.319924 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:47 crc kubenswrapper[4852]: E1201 20:06:47.320063 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:48 crc kubenswrapper[4852]: I1201 20:06:48.320005 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:48 crc kubenswrapper[4852]: I1201 20:06:48.320003 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:48 crc kubenswrapper[4852]: E1201 20:06:48.320230 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:48 crc kubenswrapper[4852]: E1201 20:06:48.320318 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:49 crc kubenswrapper[4852]: I1201 20:06:49.319525 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:49 crc kubenswrapper[4852]: I1201 20:06:49.319612 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:49 crc kubenswrapper[4852]: E1201 20:06:49.319720 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 20:06:49 crc kubenswrapper[4852]: E1201 20:06:49.319838 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 20:06:50 crc kubenswrapper[4852]: I1201 20:06:50.319124 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:50 crc kubenswrapper[4852]: E1201 20:06:50.321022 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2q4c" podUID="7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a" Dec 01 20:06:50 crc kubenswrapper[4852]: I1201 20:06:50.321125 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:50 crc kubenswrapper[4852]: E1201 20:06:50.321374 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 20:06:51 crc kubenswrapper[4852]: I1201 20:06:51.319440 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:06:51 crc kubenswrapper[4852]: I1201 20:06:51.319597 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:06:51 crc kubenswrapper[4852]: I1201 20:06:51.323478 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 01 20:06:51 crc kubenswrapper[4852]: I1201 20:06:51.323832 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 01 20:06:51 crc kubenswrapper[4852]: I1201 20:06:51.324232 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 01 20:06:51 crc kubenswrapper[4852]: I1201 20:06:51.325382 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 01 20:06:52 crc kubenswrapper[4852]: I1201 20:06:52.319407 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:06:52 crc kubenswrapper[4852]: I1201 20:06:52.319817 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:06:52 crc kubenswrapper[4852]: I1201 20:06:52.323240 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 01 20:06:52 crc kubenswrapper[4852]: I1201 20:06:52.323749 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.211932 4852 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.262665 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rxccs"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.263077 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.267384 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-tb6ll"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.268239 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.273890 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.279741 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.297175 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.306215 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.308168 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.306122 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.308715 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.308715 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.309273 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.309859 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.310089 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.310230 4852 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.310591 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.311861 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.312044 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.312054 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.312285 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.312429 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.312662 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.312179 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.312941 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.316263 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.318690 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.319432 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.319664 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-q48jr"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.321386 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.321834 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.322089 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.322901 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.323539 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.324071 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.325578 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.325780 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.326099 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.326113 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.326265 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.326528 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.326759 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.326951 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.327159 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.328268 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.328534 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.328719 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.328889 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.331237 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.331559 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.331717 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.331871 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.331879 4852 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.332098 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.332262 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.332436 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.332681 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-n7xgz"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.333274 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.332442 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.332685 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.336617 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.342954 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-client-ca\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.342993 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2ae2ef7-e308-4896-b697-31b8241dffca-serving-cert\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.343036 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e476ff27-7c39-4627-b799-282107cac068-images\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.343068 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2gtg\" (UniqueName: \"kubernetes.io/projected/b2ae2ef7-e308-4896-b697-31b8241dffca-kube-api-access-m2gtg\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.343118 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvmxk\" (UniqueName: 
\"kubernetes.io/projected/e476ff27-7c39-4627-b799-282107cac068-kube-api-access-zvmxk\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.343144 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.343180 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e476ff27-7c39-4627-b799-282107cac068-config\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.343203 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e476ff27-7c39-4627-b799-282107cac068-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.343227 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-config\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.350916 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.351284 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.351180 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.351685 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.351932 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-glnwk"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.352519 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-wnjpd"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.352863 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.353100 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.353667 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.353728 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.353739 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.353789 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.354024 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.354576 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-2ccf2"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.354927 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.354930 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.356013 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-nqllf"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.356148 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.356579 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.357528 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.364685 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.364842 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ld6ql"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.367356 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.370130 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ts9nz"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.365347 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.367995 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.357080 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-nqllf" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.370407 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.377328 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.377999 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.383342 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.383684 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.430243 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.431012 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.431346 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.431472 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-t5h7h"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.432247 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.432726 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.432889 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.433057 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.433154 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.433259 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.433271 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.433359 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.433982 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434185 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434366 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434443 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434485 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434372 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434369 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434567 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434590 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434610 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434631 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434665 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434686 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434698 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.434734 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.437720 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.437837 4852 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"authentication-operator-config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.437940 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.438042 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.438171 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.438271 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.438364 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.439230 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.439898 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.440106 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.440241 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.440340 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.440644 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.440865 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.441130 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.441327 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.441444 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.441532 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.441748 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.440273 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.443718 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.452444 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdjnr\" (UniqueName: \"kubernetes.io/projected/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-kube-api-access-sdjnr\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.463531 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-config\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.448675 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mbbfj"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.463797 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2ae2ef7-e308-4896-b697-31b8241dffca-serving-cert\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.465644 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-client-ca\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.466006 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2gtg\" (UniqueName: \"kubernetes.io/projected/b2ae2ef7-e308-4896-b697-31b8241dffca-kube-api-access-m2gtg\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.466199 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-config\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.466306 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e476ff27-7c39-4627-b799-282107cac068-images\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.466405 4852 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-zvmxk\" (UniqueName: \"kubernetes.io/projected/e476ff27-7c39-4627-b799-282107cac068-kube-api-access-zvmxk\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.447587 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.468330 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-client-ca\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.448036 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.448189 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.449093 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.453959 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.469357 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e476ff27-7c39-4627-b799-282107cac068-images\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.447138 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.467063 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-serving-cert\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.470232 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-config\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.474884 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-service-ca-bundle\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.480156 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.480603 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e476ff27-7c39-4627-b799-282107cac068-config\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.481508 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e476ff27-7c39-4627-b799-282107cac068-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.476827 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.480446 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.497898 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.498486 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.477795 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.480547 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.501067 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.502438 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.478804 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.505770 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e476ff27-7c39-4627-b799-282107cac068-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.478423 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.481809 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e476ff27-7c39-4627-b799-282107cac068-config\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.479336 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.508872 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.479164 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.509321 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.510084 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.510881 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.511342 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-pvvl9"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.511866 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.513698 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.514261 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.515130 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2ae2ef7-e308-4896-b697-31b8241dffca-serving-cert\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.516837 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.518890 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.520606 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.521406 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.522666 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zfljp"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.523496 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.524894 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-t6knk"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.524929 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.525643 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.526674 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.527726 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.528063 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.528659 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.529210 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4k28k"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.529704 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.531146 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.531770 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.532371 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.532980 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.533820 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.534520 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.535856 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.537786 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-tb6ll"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.539581 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.540998 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rxccs"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.542859 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-qht6p"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.544136 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-qht6p" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.545626 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.546495 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ts9nz"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.547831 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-q48jr"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.549411 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-2ccf2"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.550983 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.552082 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-nqllf"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.553153 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.554149 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-glnwk"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.555180 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.556172 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.557260 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-n7xgz"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.559802 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.561127 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.562103 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.563098 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-wnjpd"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.564015 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.564992 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.565305 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 
01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.565935 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.566907 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ld6ql"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.567858 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.569007 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.570127 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zfljp"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.571271 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.572498 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7rzbn"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.573472 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-mwxfn"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.574282 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.576258 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-mwxfn" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.577861 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mbbfj"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.580985 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.583179 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-config\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.583309 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-serving-cert\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.583377 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-service-ca-bundle\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.583791 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.584371 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-config\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.585091 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdjnr\" (UniqueName: \"kubernetes.io/projected/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-kube-api-access-sdjnr\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.585217 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-service-ca-bundle\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.585554 4852 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.585781 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.587067 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-serving-cert\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.587789 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.589624 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.592481 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.595391 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.596572 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.597729 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7rzbn"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.598820 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-t6knk"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.600253 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-mwxfn"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.601962 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-t5h7h"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.602997 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4k28k"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.604003 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-68zfl"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.605306 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.605521 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-68zfl"] Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.605665 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-68zfl" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.625134 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.644999 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.665269 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.705111 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.734154 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.745566 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.765117 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.785757 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.805354 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.826094 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.846273 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.867295 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.885126 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.906304 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.926351 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.946278 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.965294 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 01 20:06:55 crc kubenswrapper[4852]: I1201 20:06:55.985550 4852 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.005885 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.025474 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.091424 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvmxk\" (UniqueName: \"kubernetes.io/projected/e476ff27-7c39-4627-b799-282107cac068-kube-api-access-zvmxk\") pod \"machine-api-operator-5694c8668f-tb6ll\" (UID: \"e476ff27-7c39-4627-b799-282107cac068\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.104606 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2gtg\" (UniqueName: \"kubernetes.io/projected/b2ae2ef7-e308-4896-b697-31b8241dffca-kube-api-access-m2gtg\") pod \"controller-manager-879f6c89f-rxccs\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.104747 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.125725 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.145722 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.165040 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.185189 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.205830 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.210614 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.226242 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.242244 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.246477 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.265823 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.286650 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.306739 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.326380 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.346903 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.366405 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.385691 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.406486 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.426449 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.444880 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.455333 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rxccs"] Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.465575 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.465608 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-tb6ll"] Dec 01 20:06:56 crc kubenswrapper[4852]: W1201 20:06:56.471636 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode476ff27_7c39_4627_b799_282107cac068.slice/crio-180c7fed0a048aa05817c119b419aa4a5b5cd64bd0a78ce5ca1426bf4f1a5ba8 WatchSource:0}: Error finding container 180c7fed0a048aa05817c119b419aa4a5b5cd64bd0a78ce5ca1426bf4f1a5ba8: Status 404 returned error can't find the container with id 180c7fed0a048aa05817c119b419aa4a5b5cd64bd0a78ce5ca1426bf4f1a5ba8 Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.485671 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 
20:06:56.505477 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.524329 4852 request.go:700] Waited for 1.007088756s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/secrets?fieldSelector=metadata.name%3Drouter-metrics-certs-default&limit=500&resourceVersion=0 Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.525865 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.545341 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.565572 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.585244 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.604680 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.626203 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.645166 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.665445 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.688709 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.705252 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.727310 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.746438 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.766675 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.785309 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.807603 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.826330 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.846034 
4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.866839 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.886041 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.906441 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.926042 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.946368 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.967191 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 01 20:06:56 crc kubenswrapper[4852]: I1201 20:06:56.985506 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.005824 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.021860 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" event={"ID":"e476ff27-7c39-4627-b799-282107cac068","Type":"ContainerStarted","Data":"bdb9b285e3fd7cd2a1b503866586c70e33d43cc0bccd60bdba4ed8f3c550a529"} Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.021908 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" event={"ID":"e476ff27-7c39-4627-b799-282107cac068","Type":"ContainerStarted","Data":"67e6bdd67c4ff39b26dc87efe3d5600c9c7f5dfe88d41bf903a5416178051ba3"} Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.021924 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" event={"ID":"e476ff27-7c39-4627-b799-282107cac068","Type":"ContainerStarted","Data":"180c7fed0a048aa05817c119b419aa4a5b5cd64bd0a78ce5ca1426bf4f1a5ba8"} Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.023516 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" event={"ID":"b2ae2ef7-e308-4896-b697-31b8241dffca","Type":"ContainerStarted","Data":"6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426"} Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.023583 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" event={"ID":"b2ae2ef7-e308-4896-b697-31b8241dffca","Type":"ContainerStarted","Data":"9e3fbf43ad64cae5b6055239262e998ce4fef6086a2060106fdbdff8924d4939"} Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.023741 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.025864 4852 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.026273 4852 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-rxccs container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.026334 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" podUID="b2ae2ef7-e308-4896-b697-31b8241dffca" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.045668 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.065051 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.085205 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.105237 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.125066 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.144829 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.165759 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.185554 4852 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.206850 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.226558 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.245603 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.265968 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.306574 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.309707 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdjnr\" (UniqueName: 
\"kubernetes.io/projected/fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b-kube-api-access-sdjnr\") pod \"authentication-operator-69f744f599-2ccf2\" (UID: \"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.325977 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.347635 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.355406 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.366944 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408185 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/551cdd05-d373-4936-b295-281f59449cde-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408266 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/84b94460-3d16-466f-bb73-b65f633d16d7-etcd-client\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408323 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-etcd-serving-ca\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408365 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8f2fd72c-c9d0-400a-9658-b1a89365a32e-encryption-config\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408434 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-registry-tls\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408478 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-oauth-serving-cert\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " 
pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408506 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408531 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sfqq\" (UniqueName: \"kubernetes.io/projected/1f6e0af9-aa12-497b-b39d-562dda0b4127-kube-api-access-8sfqq\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408554 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdp2w\" (UniqueName: \"kubernetes.io/projected/84b94460-3d16-466f-bb73-b65f633d16d7-kube-api-access-gdp2w\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408586 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a55c2bbe-e79b-41f2-9349-8d96592516d1-etcd-client\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408605 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/84b94460-3d16-466f-bb73-b65f633d16d7-serving-cert\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408625 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408659 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-oauth-config\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408692 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/878a6eea-f252-41c4-b674-2c0e4ade05f0-config\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 
crc kubenswrapper[4852]: I1201 20:06:57.408716 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/57147412-f20a-4dbd-9337-4839c074baaf-available-featuregates\") pod \"openshift-config-operator-7777fb866f-zxqr5\" (UID: \"57147412-f20a-4dbd-9337-4839c074baaf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408740 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f6e0af9-aa12-497b-b39d-562dda0b4127-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408776 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e8955ebb-0525-480b-8ab4-f2b9d808b69f-trusted-ca\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408811 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj662\" (UniqueName: \"kubernetes.io/projected/e8955ebb-0525-480b-8ab4-f2b9d808b69f-kube-api-access-kj662\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408835 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ff119331-8296-43c6-abd8-2da90ba021a6-audit-dir\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408857 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/878a6eea-f252-41c4-b674-2c0e4ade05f0-machine-approver-tls\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408879 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-audit\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408900 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 
20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408923 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbwsw\" (UniqueName: \"kubernetes.io/projected/ff119331-8296-43c6-abd8-2da90ba021a6-kube-api-access-vbwsw\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408944 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-trusted-ca\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408964 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f6e0af9-aa12-497b-b39d-562dda0b4127-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.408991 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fwvm\" (UniqueName: \"kubernetes.io/projected/ee1b682a-9dad-4a65-9f73-9bf26af43729-kube-api-access-8fwvm\") pod \"openshift-apiserver-operator-796bbdcf4f-b8cql\" (UID: \"ee1b682a-9dad-4a65-9f73-9bf26af43729\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409024 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a55c2bbe-e79b-41f2-9349-8d96592516d1-audit-dir\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409047 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-service-ca\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409074 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb648\" (UniqueName: \"kubernetes.io/projected/ab102fcf-71d9-40fc-9b9d-79b697e7864c-kube-api-access-bb648\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409131 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57147412-f20a-4dbd-9337-4839c074baaf-serving-cert\") pod \"openshift-config-operator-7777fb866f-zxqr5\" (UID: \"57147412-f20a-4dbd-9337-4839c074baaf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409148 4852 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f2fd72c-c9d0-400a-9658-b1a89365a32e-etcd-client\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409164 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409214 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8955ebb-0525-480b-8ab4-f2b9d808b69f-config\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409236 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/18202e52-4e9d-49b2-9214-8f9e55684bbb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8wlj7\" (UID: \"18202e52-4e9d-49b2-9214-8f9e55684bbb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409253 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/878a6eea-f252-41c4-b674-2c0e4ade05f0-auth-proxy-config\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409292 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jj9n\" (UniqueName: \"kubernetes.io/projected/ac2e2b97-df99-4373-b8f4-990f66fdc01b-kube-api-access-9jj9n\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409341 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18202e52-4e9d-49b2-9214-8f9e55684bbb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8wlj7\" (UID: \"18202e52-4e9d-49b2-9214-8f9e55684bbb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409361 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18202e52-4e9d-49b2-9214-8f9e55684bbb-config\") pod \"kube-controller-manager-operator-78b949d7b-8wlj7\" (UID: \"18202e52-4e9d-49b2-9214-8f9e55684bbb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" Dec 01 20:06:57 crc 
kubenswrapper[4852]: I1201 20:06:57.409795 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/69d514ec-24fc-4900-a812-fa1ca252b98f-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-5st5z\" (UID: \"69d514ec-24fc-4900-a812-fa1ca252b98f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409938 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.409999 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54eaf19c-13a7-4d25-a623-a6e4eaa236d7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-b29vn\" (UID: \"54eaf19c-13a7-4d25-a623-a6e4eaa236d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410034 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee1b682a-9dad-4a65-9f73-9bf26af43729-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-b8cql\" (UID: \"ee1b682a-9dad-4a65-9f73-9bf26af43729\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410062 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5svcx\" (UniqueName: \"kubernetes.io/projected/c4211bad-89db-417b-acc1-5097f9b656cf-kube-api-access-5svcx\") pod \"migrator-59844c95c7-fs2n7\" (UID: \"c4211bad-89db-417b-acc1-5097f9b656cf\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410096 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-audit-policies\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410140 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410172 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac2e2b97-df99-4373-b8f4-990f66fdc01b-serving-cert\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410211 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-trusted-ca-bundle\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410238 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410265 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/84b94460-3d16-466f-bb73-b65f633d16d7-etcd-service-ca\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410343 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-config\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410376 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qth4k\" (UniqueName: \"kubernetes.io/projected/878a6eea-f252-41c4-b674-2c0e4ade05f0-kube-api-access-qth4k\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410423 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-config\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410443 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-config\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410493 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a55c2bbe-e79b-41f2-9349-8d96592516d1-audit-policies\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 
20:06:57.410586 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1f6e0af9-aa12-497b-b39d-562dda0b4127-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410632 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jm8n\" (UniqueName: \"kubernetes.io/projected/57147412-f20a-4dbd-9337-4839c074baaf-kube-api-access-6jm8n\") pod \"openshift-config-operator-7777fb866f-zxqr5\" (UID: \"57147412-f20a-4dbd-9337-4839c074baaf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410663 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410871 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8f2fd72c-c9d0-400a-9658-b1a89365a32e-audit-dir\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.410976 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/551cdd05-d373-4936-b295-281f59449cde-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411009 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhnlw\" (UniqueName: \"kubernetes.io/projected/54eaf19c-13a7-4d25-a623-a6e4eaa236d7-kube-api-access-jhnlw\") pod \"openshift-controller-manager-operator-756b6f6bc6-b29vn\" (UID: \"54eaf19c-13a7-4d25-a623-a6e4eaa236d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411036 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-serving-cert\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411060 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8955ebb-0525-480b-8ab4-f2b9d808b69f-serving-cert\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " 
pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411080 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a55c2bbe-e79b-41f2-9349-8d96592516d1-encryption-config\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411099 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kl85\" (UniqueName: \"kubernetes.io/projected/a55c2bbe-e79b-41f2-9349-8d96592516d1-kube-api-access-8kl85\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411132 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411158 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-client-ca\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411179 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghtx8\" (UniqueName: \"kubernetes.io/projected/69d514ec-24fc-4900-a812-fa1ca252b98f-kube-api-access-ghtx8\") pod \"cluster-samples-operator-665b6dd947-5st5z\" (UID: \"69d514ec-24fc-4900-a812-fa1ca252b98f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411201 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8f2fd72c-c9d0-400a-9658-b1a89365a32e-node-pullsecrets\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411226 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411250 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: 
\"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411276 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a55c2bbe-e79b-41f2-9349-8d96592516d1-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411295 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f2fd72c-c9d0-400a-9658-b1a89365a32e-serving-cert\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411328 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a55c2bbe-e79b-41f2-9349-8d96592516d1-serving-cert\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411362 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-registry-certificates\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411388 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54eaf19c-13a7-4d25-a623-a6e4eaa236d7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-b29vn\" (UID: \"54eaf19c-13a7-4d25-a623-a6e4eaa236d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411409 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27x84\" (UniqueName: \"kubernetes.io/projected/79fbfc7e-85e3-4d27-8aec-c0157592c888-kube-api-access-27x84\") pod \"downloads-7954f5f757-nqllf\" (UID: \"79fbfc7e-85e3-4d27-8aec-c0157592c888\") " pod="openshift-console/downloads-7954f5f757-nqllf" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411431 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-image-import-ca\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: E1201 20:06:57.411464 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:57.911434867 +0000 UTC m=+137.838516284 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411486 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z22r7\" (UniqueName: \"kubernetes.io/projected/8f2fd72c-c9d0-400a-9658-b1a89365a32e-kube-api-access-z22r7\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411580 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsssr\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-kube-api-access-tsssr\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411609 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee1b682a-9dad-4a65-9f73-9bf26af43729-config\") pod \"openshift-apiserver-operator-796bbdcf4f-b8cql\" (UID: \"ee1b682a-9dad-4a65-9f73-9bf26af43729\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411629 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84b94460-3d16-466f-bb73-b65f633d16d7-config\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411654 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-bound-sa-token\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411678 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a55c2bbe-e79b-41f2-9349-8d96592516d1-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411707 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/84b94460-3d16-466f-bb73-b65f633d16d7-etcd-ca\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411727 4852 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.411755 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.513235 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:57 crc kubenswrapper[4852]: E1201 20:06:57.513552 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.013505723 +0000 UTC m=+137.940587140 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.513842 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/20a3acb3-28ed-4ce0-a281-b9006b6e7fe6-signing-cabundle\") pod \"service-ca-9c57cc56f-4k28k\" (UID: \"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6\") " pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.513873 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/878a6eea-f252-41c4-b674-2c0e4ade05f0-config\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.513891 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f6e0af9-aa12-497b-b39d-562dda0b4127-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.513912 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/e8955ebb-0525-480b-8ab4-f2b9d808b69f-trusted-ca\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.513926 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj662\" (UniqueName: \"kubernetes.io/projected/e8955ebb-0525-480b-8ab4-f2b9d808b69f-kube-api-access-kj662\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.513944 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ff119331-8296-43c6-abd8-2da90ba021a6-audit-dir\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.513967 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-audit\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.513986 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/65011ad9-c976-4071-b7eb-81f7e3c4975e-images\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514002 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6f1d808e-f29f-40db-a8c6-f2af78c6ae2a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mbbfj\" (UID: \"6f1d808e-f29f-40db-a8c6-f2af78c6ae2a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514020 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-trusted-ca\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514039 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j7qf\" (UniqueName: \"kubernetes.io/projected/6f1d808e-f29f-40db-a8c6-f2af78c6ae2a-kube-api-access-4j7qf\") pod \"multus-admission-controller-857f4d67dd-mbbfj\" (UID: \"6f1d808e-f29f-40db-a8c6-f2af78c6ae2a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514055 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/08729539-55f5-4d1c-a952-9af42aa77b9c-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-v6j29\" (UID: \"08729539-55f5-4d1c-a952-9af42aa77b9c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514080 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-service-ca\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514094 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb648\" (UniqueName: \"kubernetes.io/projected/ab102fcf-71d9-40fc-9b9d-79b697e7864c-kube-api-access-bb648\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514109 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f2fd72c-c9d0-400a-9658-b1a89365a32e-etcd-client\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514125 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514140 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18202e52-4e9d-49b2-9214-8f9e55684bbb-config\") pod \"kube-controller-manager-operator-78b949d7b-8wlj7\" (UID: \"18202e52-4e9d-49b2-9214-8f9e55684bbb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514155 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/878a6eea-f252-41c4-b674-2c0e4ade05f0-auth-proxy-config\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514170 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/50c50fc4-d5f4-4d1f-96d1-1e510221cc28-metrics-tls\") pod \"dns-default-mwxfn\" (UID: \"50c50fc4-d5f4-4d1f-96d1-1e510221cc28\") " pod="openshift-dns/dns-default-mwxfn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514186 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c7b107d8-6494-4f33-98d7-e97742749210-node-bootstrap-token\") pod \"machine-config-server-qht6p\" (UID: \"c7b107d8-6494-4f33-98d7-e97742749210\") " pod="openshift-machine-config-operator/machine-config-server-qht6p" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514217 
4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jj9n\" (UniqueName: \"kubernetes.io/projected/ac2e2b97-df99-4373-b8f4-990f66fdc01b-kube-api-access-9jj9n\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514232 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18202e52-4e9d-49b2-9214-8f9e55684bbb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8wlj7\" (UID: \"18202e52-4e9d-49b2-9214-8f9e55684bbb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514249 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80abad70-6989-429d-9a1b-80163a0fdc2b-config\") pod \"service-ca-operator-777779d784-t6knk\" (UID: \"80abad70-6989-429d-9a1b-80163a0fdc2b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514266 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/93085d90-0464-4c0c-9908-237d7cb85b24-metrics-certs\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514284 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514300 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz66h\" (UniqueName: \"kubernetes.io/projected/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-kube-api-access-kz66h\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514315 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54eaf19c-13a7-4d25-a623-a6e4eaa236d7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-b29vn\" (UID: \"54eaf19c-13a7-4d25-a623-a6e4eaa236d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514331 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-audit-policies\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514346 4852 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b0d0b09-9413-4609-84a5-75665474cde3-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-hctzd\" (UID: \"4b0d0b09-9413-4609-84a5-75665474cde3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514364 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlrpk\" (UniqueName: \"kubernetes.io/projected/65c26ddd-9a26-4b9c-b3fa-74827d33872a-kube-api-access-rlrpk\") pod \"marketplace-operator-79b997595-zfljp\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514383 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7b65e8c-0f7e-441d-9183-2090247908eb-config-volume\") pod \"collect-profiles-29410320-lm4qd\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514400 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514417 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-registration-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514433 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/84b94460-3d16-466f-bb73-b65f633d16d7-etcd-service-ca\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514468 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jh6mx\" (UniqueName: \"kubernetes.io/projected/c7b107d8-6494-4f33-98d7-e97742749210-kube-api-access-jh6mx\") pod \"machine-config-server-qht6p\" (UID: \"c7b107d8-6494-4f33-98d7-e97742749210\") " pod="openshift-machine-config-operator/machine-config-server-qht6p" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514485 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/53099b1c-1857-463d-922b-e1d61ccda3fd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-k4r8z\" (UID: \"53099b1c-1857-463d-922b-e1d61ccda3fd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514506 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-qth4k\" (UniqueName: \"kubernetes.io/projected/878a6eea-f252-41c4-b674-2c0e4ade05f0-kube-api-access-qth4k\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514521 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-config\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514538 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnkdg\" (UniqueName: \"kubernetes.io/projected/1e82e35b-0155-4532-867b-b87d8c789575-kube-api-access-mnkdg\") pod \"ingress-canary-68zfl\" (UID: \"1e82e35b-0155-4532-867b-b87d8c789575\") " pod="openshift-ingress-canary/ingress-canary-68zfl" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514554 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514554 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/878a6eea-f252-41c4-b674-2c0e4ade05f0-config\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514573 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a55c2bbe-e79b-41f2-9349-8d96592516d1-audit-policies\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514590 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/206eb8d6-f4cd-4410-b6e4-a0452c21a0f1-proxy-tls\") pod \"machine-config-controller-84d6567774-7khwn\" (UID: \"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514607 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-serving-cert\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514623 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8955ebb-0525-480b-8ab4-f2b9d808b69f-serving-cert\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " 
pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514641 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/93085d90-0464-4c0c-9908-237d7cb85b24-service-ca-bundle\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514655 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p6xx\" (UniqueName: \"kubernetes.io/projected/4b0d0b09-9413-4609-84a5-75665474cde3-kube-api-access-5p6xx\") pod \"kube-storage-version-migrator-operator-b67b599dd-hctzd\" (UID: \"4b0d0b09-9413-4609-84a5-75665474cde3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514673 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/551cdd05-d373-4936-b295-281f59449cde-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514689 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhnlw\" (UniqueName: \"kubernetes.io/projected/54eaf19c-13a7-4d25-a623-a6e4eaa236d7-kube-api-access-jhnlw\") pod \"openshift-controller-manager-operator-756b6f6bc6-b29vn\" (UID: \"54eaf19c-13a7-4d25-a623-a6e4eaa236d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514703 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/20a3acb3-28ed-4ce0-a281-b9006b6e7fe6-signing-key\") pod \"service-ca-9c57cc56f-4k28k\" (UID: \"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6\") " pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514719 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zfljp\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514735 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghtx8\" (UniqueName: \"kubernetes.io/projected/69d514ec-24fc-4900-a812-fa1ca252b98f-kube-api-access-ghtx8\") pod \"cluster-samples-operator-665b6dd947-5st5z\" (UID: \"69d514ec-24fc-4900-a812-fa1ca252b98f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514750 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kl85\" (UniqueName: \"kubernetes.io/projected/a55c2bbe-e79b-41f2-9349-8d96592516d1-kube-api-access-8kl85\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: 
\"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514766 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8f2fd72c-c9d0-400a-9658-b1a89365a32e-node-pullsecrets\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514781 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7b65e8c-0f7e-441d-9183-2090247908eb-secret-volume\") pod \"collect-profiles-29410320-lm4qd\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514797 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2440e229-6826-42f6-8e47-7026c3963cb2-apiservice-cert\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514812 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/206eb8d6-f4cd-4410-b6e4-a0452c21a0f1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-7khwn\" (UID: \"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514829 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514844 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514860 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv2sk\" (UniqueName: \"kubernetes.io/projected/50c50fc4-d5f4-4d1f-96d1-1e510221cc28-kube-api-access-cv2sk\") pod \"dns-default-mwxfn\" (UID: \"50c50fc4-d5f4-4d1f-96d1-1e510221cc28\") " pod="openshift-dns/dns-default-mwxfn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514876 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a55c2bbe-e79b-41f2-9349-8d96592516d1-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514891 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f2fd72c-c9d0-400a-9658-b1a89365a32e-serving-cert\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514905 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1e82e35b-0155-4532-867b-b87d8c789575-cert\") pod \"ingress-canary-68zfl\" (UID: \"1e82e35b-0155-4532-867b-b87d8c789575\") " pod="openshift-ingress-canary/ingress-canary-68zfl" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514921 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/93085d90-0464-4c0c-9908-237d7cb85b24-default-certificate\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514934 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/93085d90-0464-4c0c-9908-237d7cb85b24-stats-auth\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514949 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zs5g\" (UniqueName: \"kubernetes.io/projected/53099b1c-1857-463d-922b-e1d61ccda3fd-kube-api-access-2zs5g\") pod \"olm-operator-6b444d44fb-k4r8z\" (UID: \"53099b1c-1857-463d-922b-e1d61ccda3fd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514964 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a55c2bbe-e79b-41f2-9349-8d96592516d1-serving-cert\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514980 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvktw\" (UniqueName: \"kubernetes.io/projected/54c5c0b6-21b4-40de-b252-b8ae4b6da45e-kube-api-access-mvktw\") pod \"dns-operator-744455d44c-t5h7h\" (UID: \"54c5c0b6-21b4-40de-b252-b8ae4b6da45e\") " pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.514998 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27x84\" (UniqueName: \"kubernetes.io/projected/79fbfc7e-85e3-4d27-8aec-c0157592c888-kube-api-access-27x84\") pod \"downloads-7954f5f757-nqllf\" (UID: \"79fbfc7e-85e3-4d27-8aec-c0157592c888\") " pod="openshift-console/downloads-7954f5f757-nqllf" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515014 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z22r7\" (UniqueName: 
\"kubernetes.io/projected/8f2fd72c-c9d0-400a-9658-b1a89365a32e-kube-api-access-z22r7\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515029 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/84b94460-3d16-466f-bb73-b65f633d16d7-etcd-ca\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515044 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515055 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-service-ca\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515061 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515111 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-socket-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515140 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/551cdd05-d373-4936-b295-281f59449cde-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515159 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-etcd-serving-ca\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515183 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-registry-tls\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: 
I1201 20:06:57.515206 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7e3d25d9-8048-4adf-9294-e090ab509fdd-srv-cert\") pod \"catalog-operator-68c6474976-r2rjr\" (UID: \"7e3d25d9-8048-4adf-9294-e090ab509fdd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515230 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-oauth-serving-cert\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515251 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515270 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/50c50fc4-d5f4-4d1f-96d1-1e510221cc28-config-volume\") pod \"dns-default-mwxfn\" (UID: \"50c50fc4-d5f4-4d1f-96d1-1e510221cc28\") " pod="openshift-dns/dns-default-mwxfn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515308 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a55c2bbe-e79b-41f2-9349-8d96592516d1-etcd-client\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515327 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sfqq\" (UniqueName: \"kubernetes.io/projected/1f6e0af9-aa12-497b-b39d-562dda0b4127-kube-api-access-8sfqq\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515346 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdp2w\" (UniqueName: \"kubernetes.io/projected/84b94460-3d16-466f-bb73-b65f633d16d7-kube-api-access-gdp2w\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515388 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg76t\" (UniqueName: \"kubernetes.io/projected/4458fbe8-b293-43d8-8dd3-3a443b22191f-kube-api-access-sg76t\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515406 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515430 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/57147412-f20a-4dbd-9337-4839c074baaf-available-featuregates\") pod \"openshift-config-operator-7777fb866f-zxqr5\" (UID: \"57147412-f20a-4dbd-9337-4839c074baaf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515467 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/84b94460-3d16-466f-bb73-b65f633d16d7-serving-cert\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515486 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515508 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-oauth-config\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515535 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctcpz\" (UniqueName: \"kubernetes.io/projected/20a3acb3-28ed-4ce0-a281-b9006b6e7fe6-kube-api-access-ctcpz\") pod \"service-ca-9c57cc56f-4k28k\" (UID: \"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6\") " pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515555 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0d0b09-9413-4609-84a5-75665474cde3-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-hctzd\" (UID: \"4b0d0b09-9413-4609-84a5-75665474cde3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515572 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/878a6eea-f252-41c4-b674-2c0e4ade05f0-machine-approver-tls\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515591 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515611 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbwsw\" (UniqueName: \"kubernetes.io/projected/ff119331-8296-43c6-abd8-2da90ba021a6-kube-api-access-vbwsw\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515629 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/53099b1c-1857-463d-922b-e1d61ccda3fd-srv-cert\") pod \"olm-operator-6b444d44fb-k4r8z\" (UID: \"53099b1c-1857-463d-922b-e1d61ccda3fd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515649 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f6e0af9-aa12-497b-b39d-562dda0b4127-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515669 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fwvm\" (UniqueName: \"kubernetes.io/projected/ee1b682a-9dad-4a65-9f73-9bf26af43729-kube-api-access-8fwvm\") pod \"openshift-apiserver-operator-796bbdcf4f-b8cql\" (UID: \"ee1b682a-9dad-4a65-9f73-9bf26af43729\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515686 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2mp5\" (UniqueName: \"kubernetes.io/projected/21a47c79-31fd-4082-86f4-8b902af7b09d-kube-api-access-d2mp5\") pod \"package-server-manager-789f6589d5-56wb9\" (UID: \"21a47c79-31fd-4082-86f4-8b902af7b09d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515706 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a55c2bbe-e79b-41f2-9349-8d96592516d1-audit-dir\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515723 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d760681-acd7-4c14-9510-74a753796ac9-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cf7w4\" (UID: \"1d760681-acd7-4c14-9510-74a753796ac9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515740 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/57147412-f20a-4dbd-9337-4839c074baaf-serving-cert\") pod \"openshift-config-operator-7777fb866f-zxqr5\" (UID: \"57147412-f20a-4dbd-9337-4839c074baaf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515760 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8955ebb-0525-480b-8ab4-f2b9d808b69f-config\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515777 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/18202e52-4e9d-49b2-9214-8f9e55684bbb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8wlj7\" (UID: \"18202e52-4e9d-49b2-9214-8f9e55684bbb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515795 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/69d514ec-24fc-4900-a812-fa1ca252b98f-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-5st5z\" (UID: \"69d514ec-24fc-4900-a812-fa1ca252b98f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515809 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-trusted-ca\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515845 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80abad70-6989-429d-9a1b-80163a0fdc2b-serving-cert\") pod \"service-ca-operator-777779d784-t6knk\" (UID: \"80abad70-6989-429d-9a1b-80163a0fdc2b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515861 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-metrics-tls\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515878 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/54c5c0b6-21b4-40de-b252-b8ae4b6da45e-metrics-tls\") pod \"dns-operator-744455d44c-t5h7h\" (UID: \"54c5c0b6-21b4-40de-b252-b8ae4b6da45e\") " pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515895 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zfljp\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515914 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee1b682a-9dad-4a65-9f73-9bf26af43729-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-b8cql\" (UID: \"ee1b682a-9dad-4a65-9f73-9bf26af43729\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515933 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5svcx\" (UniqueName: \"kubernetes.io/projected/c4211bad-89db-417b-acc1-5097f9b656cf-kube-api-access-5svcx\") pod \"migrator-59844c95c7-fs2n7\" (UID: \"c4211bad-89db-417b-acc1-5097f9b656cf\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515949 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/65011ad9-c976-4071-b7eb-81f7e3c4975e-proxy-tls\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515967 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffc60626-0001-47d7-a0f7-46450b095e84-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b5sxx\" (UID: \"ffc60626-0001-47d7-a0f7-46450b095e84\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515982 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2440e229-6826-42f6-8e47-7026c3963cb2-webhook-cert\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.515996 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbvzg\" (UniqueName: \"kubernetes.io/projected/80abad70-6989-429d-9a1b-80163a0fdc2b-kube-api-access-hbvzg\") pod \"service-ca-operator-777779d784-t6knk\" (UID: \"80abad70-6989-429d-9a1b-80163a0fdc2b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516016 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac2e2b97-df99-4373-b8f4-990f66fdc01b-serving-cert\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516034 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-trusted-ca-bundle\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516050 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516067 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7e3d25d9-8048-4adf-9294-e090ab509fdd-profile-collector-cert\") pod \"catalog-operator-68c6474976-r2rjr\" (UID: \"7e3d25d9-8048-4adf-9294-e090ab509fdd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516085 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-config\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516101 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d760681-acd7-4c14-9510-74a753796ac9-config\") pod \"kube-apiserver-operator-766d6c64bb-cf7w4\" (UID: \"1d760681-acd7-4c14-9510-74a753796ac9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516116 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnhgw\" (UniqueName: \"kubernetes.io/projected/2440e229-6826-42f6-8e47-7026c3963cb2-kube-api-access-qnhgw\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516155 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-config\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516170 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-plugins-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516188 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1f6e0af9-aa12-497b-b39d-562dda0b4127-image-registry-operator-tls\") pod 
\"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516220 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jm8n\" (UniqueName: \"kubernetes.io/projected/57147412-f20a-4dbd-9337-4839c074baaf-kube-api-access-6jm8n\") pod \"openshift-config-operator-7777fb866f-zxqr5\" (UID: \"57147412-f20a-4dbd-9337-4839c074baaf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516251 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwjmw\" (UniqueName: \"kubernetes.io/projected/7e3d25d9-8048-4adf-9294-e090ab509fdd-kube-api-access-jwjmw\") pod \"catalog-operator-68c6474976-r2rjr\" (UID: \"7e3d25d9-8048-4adf-9294-e090ab509fdd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516274 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8f2fd72c-c9d0-400a-9658-b1a89365a32e-audit-dir\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516291 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a55c2bbe-e79b-41f2-9349-8d96592516d1-encryption-config\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516314 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516332 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-client-ca\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516355 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d760681-acd7-4c14-9510-74a753796ac9-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cf7w4\" (UID: \"1d760681-acd7-4c14-9510-74a753796ac9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516378 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-mountpoint-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: 
\"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516394 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffc60626-0001-47d7-a0f7-46450b095e84-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b5sxx\" (UID: \"ffc60626-0001-47d7-a0f7-46450b095e84\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516411 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnzt7\" (UniqueName: \"kubernetes.io/projected/b7b65e8c-0f7e-441d-9183-2090247908eb-kube-api-access-bnzt7\") pod \"collect-profiles-29410320-lm4qd\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516427 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bp2hs\" (UniqueName: \"kubernetes.io/projected/08729539-55f5-4d1c-a952-9af42aa77b9c-kube-api-access-bp2hs\") pod \"control-plane-machine-set-operator-78cbb6b69f-v6j29\" (UID: \"08729539-55f5-4d1c-a952-9af42aa77b9c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516442 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-csi-data-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516495 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-registry-certificates\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516515 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54eaf19c-13a7-4d25-a623-a6e4eaa236d7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-b29vn\" (UID: \"54eaf19c-13a7-4d25-a623-a6e4eaa236d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516535 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee1b682a-9dad-4a65-9f73-9bf26af43729-config\") pod \"openshift-apiserver-operator-796bbdcf4f-b8cql\" (UID: \"ee1b682a-9dad-4a65-9f73-9bf26af43729\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516552 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84b94460-3d16-466f-bb73-b65f633d16d7-config\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516575 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-image-import-ca\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516593 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/21a47c79-31fd-4082-86f4-8b902af7b09d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-56wb9\" (UID: \"21a47c79-31fd-4082-86f4-8b902af7b09d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516599 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-trusted-ca\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516613 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsssr\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-kube-api-access-tsssr\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516725 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/65011ad9-c976-4071-b7eb-81f7e3c4975e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516770 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gxhz\" (UniqueName: \"kubernetes.io/projected/93085d90-0464-4c0c-9908-237d7cb85b24-kube-api-access-2gxhz\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516805 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/2440e229-6826-42f6-8e47-7026c3963cb2-tmpfs\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516832 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk66z\" (UniqueName: \"kubernetes.io/projected/206eb8d6-f4cd-4410-b6e4-a0452c21a0f1-kube-api-access-xk66z\") pod \"machine-config-controller-84d6567774-7khwn\" (UID: \"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516875 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-bound-sa-token\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516905 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a55c2bbe-e79b-41f2-9349-8d96592516d1-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516932 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffc60626-0001-47d7-a0f7-46450b095e84-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b5sxx\" (UID: \"ffc60626-0001-47d7-a0f7-46450b095e84\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.516970 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/84b94460-3d16-466f-bb73-b65f633d16d7-etcd-client\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.517001 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8f2fd72c-c9d0-400a-9658-b1a89365a32e-encryption-config\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.517027 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4lzf\" (UniqueName: \"kubernetes.io/projected/65011ad9-c976-4071-b7eb-81f7e3c4975e-kube-api-access-x4lzf\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.517055 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c7b107d8-6494-4f33-98d7-e97742749210-certs\") pod \"machine-config-server-qht6p\" (UID: \"c7b107d8-6494-4f33-98d7-e97742749210\") " pod="openshift-machine-config-operator/machine-config-server-qht6p" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.517158 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18202e52-4e9d-49b2-9214-8f9e55684bbb-config\") pod \"kube-controller-manager-operator-78b949d7b-8wlj7\" (UID: \"18202e52-4e9d-49b2-9214-8f9e55684bbb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.517577 4852 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/878a6eea-f252-41c4-b674-2c0e4ade05f0-auth-proxy-config\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.517857 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/551cdd05-d373-4936-b295-281f59449cde-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.518359 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ff119331-8296-43c6-abd8-2da90ba021a6-audit-dir\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.518873 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8f2fd72c-c9d0-400a-9658-b1a89365a32e-node-pullsecrets\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.520120 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a55c2bbe-e79b-41f2-9349-8d96592516d1-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.520279 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a55c2bbe-e79b-41f2-9349-8d96592516d1-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.520369 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-etcd-serving-ca\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.520520 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e8955ebb-0525-480b-8ab4-f2b9d808b69f-trusted-ca\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.521375 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8955ebb-0525-480b-8ab4-f2b9d808b69f-config\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc 
kubenswrapper[4852]: I1201 20:06:57.522773 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8f2fd72c-c9d0-400a-9658-b1a89365a32e-audit-dir\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.523941 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.524122 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-audit\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.524730 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.525263 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.526132 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.526873 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-trusted-ca-bundle\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: E1201 20:06:57.527992 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.027972748 +0000 UTC m=+137.955054165 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.531390 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-oauth-serving-cert\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.533811 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a55c2bbe-e79b-41f2-9349-8d96592516d1-audit-dir\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.534008 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-image-import-ca\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.534281 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a55c2bbe-e79b-41f2-9349-8d96592516d1-encryption-config\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.537076 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f6e0af9-aa12-497b-b39d-562dda0b4127-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.537749 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.537834 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.537910 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.538150 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/84b94460-3d16-466f-bb73-b65f633d16d7-etcd-client\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.538228 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/57147412-f20a-4dbd-9337-4839c074baaf-available-featuregates\") pod \"openshift-config-operator-7777fb866f-zxqr5\" (UID: \"57147412-f20a-4dbd-9337-4839c074baaf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.546381 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/84b94460-3d16-466f-bb73-b65f633d16d7-etcd-ca\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.550305 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee1b682a-9dad-4a65-9f73-9bf26af43729-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-b8cql\" (UID: \"ee1b682a-9dad-4a65-9f73-9bf26af43729\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.550856 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a55c2bbe-e79b-41f2-9349-8d96592516d1-etcd-client\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.550992 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.551358 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f2fd72c-c9d0-400a-9658-b1a89365a32e-serving-cert\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.551429 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 
crc kubenswrapper[4852]: I1201 20:06:57.551972 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.553492 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-audit-policies\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.553688 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee1b682a-9dad-4a65-9f73-9bf26af43729-config\") pod \"openshift-apiserver-operator-796bbdcf4f-b8cql\" (UID: \"ee1b682a-9dad-4a65-9f73-9bf26af43729\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.546727 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8f2fd72c-c9d0-400a-9658-b1a89365a32e-encryption-config\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.555293 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84b94460-3d16-466f-bb73-b65f633d16d7-config\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.555567 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54eaf19c-13a7-4d25-a623-a6e4eaa236d7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-b29vn\" (UID: \"54eaf19c-13a7-4d25-a623-a6e4eaa236d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.556016 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-client-ca\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.556054 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-config\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.557047 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a55c2bbe-e79b-41f2-9349-8d96592516d1-audit-policies\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: 
\"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.557074 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/84b94460-3d16-466f-bb73-b65f633d16d7-etcd-service-ca\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.557736 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac2e2b97-df99-4373-b8f4-990f66fdc01b-serving-cert\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.557766 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/878a6eea-f252-41c4-b674-2c0e4ade05f0-machine-approver-tls\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.547707 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57147412-f20a-4dbd-9337-4839c074baaf-serving-cert\") pod \"openshift-config-operator-7777fb866f-zxqr5\" (UID: \"57147412-f20a-4dbd-9337-4839c074baaf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.558925 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/551cdd05-d373-4936-b295-281f59449cde-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.559325 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-serving-cert\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.559790 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-config\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.559803 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-registry-tls\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.562381 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" 
(UniqueName: \"kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-registry-certificates\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.563536 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a55c2bbe-e79b-41f2-9349-8d96592516d1-serving-cert\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.567004 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f2fd72c-c9d0-400a-9658-b1a89365a32e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.568255 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18202e52-4e9d-49b2-9214-8f9e55684bbb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8wlj7\" (UID: \"18202e52-4e9d-49b2-9214-8f9e55684bbb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.569686 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-config\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.569808 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1f6e0af9-aa12-497b-b39d-562dda0b4127-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.570355 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/69d514ec-24fc-4900-a812-fa1ca252b98f-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-5st5z\" (UID: \"69d514ec-24fc-4900-a812-fa1ca252b98f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.572105 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsssr\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-kube-api-access-tsssr\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.572486 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-oauth-config\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " 
pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.582732 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54eaf19c-13a7-4d25-a623-a6e4eaa236d7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-b29vn\" (UID: \"54eaf19c-13a7-4d25-a623-a6e4eaa236d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.582817 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8955ebb-0525-480b-8ab4-f2b9d808b69f-serving-cert\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.583762 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-2ccf2"] Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.584524 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.584779 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/84b94460-3d16-466f-bb73-b65f633d16d7-serving-cert\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.585747 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f2fd72c-c9d0-400a-9658-b1a89365a32e-etcd-client\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.591503 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jj9n\" (UniqueName: \"kubernetes.io/projected/ac2e2b97-df99-4373-b8f4-990f66fdc01b-kube-api-access-9jj9n\") pod \"route-controller-manager-6576b87f9c-zvtsd\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.600738 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f6e0af9-aa12-497b-b39d-562dda0b4127-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.623793 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb648\" (UniqueName: \"kubernetes.io/projected/ab102fcf-71d9-40fc-9b9d-79b697e7864c-kube-api-access-bb648\") pod \"console-f9d7485db-glnwk\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") " 
pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624181 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624339 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/21a47c79-31fd-4082-86f4-8b902af7b09d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-56wb9\" (UID: \"21a47c79-31fd-4082-86f4-8b902af7b09d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624373 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk66z\" (UniqueName: \"kubernetes.io/projected/206eb8d6-f4cd-4410-b6e4-a0452c21a0f1-kube-api-access-xk66z\") pod \"machine-config-controller-84d6567774-7khwn\" (UID: \"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624391 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/65011ad9-c976-4071-b7eb-81f7e3c4975e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624408 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gxhz\" (UniqueName: \"kubernetes.io/projected/93085d90-0464-4c0c-9908-237d7cb85b24-kube-api-access-2gxhz\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624423 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/2440e229-6826-42f6-8e47-7026c3963cb2-tmpfs\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624444 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c7b107d8-6494-4f33-98d7-e97742749210-certs\") pod \"machine-config-server-qht6p\" (UID: \"c7b107d8-6494-4f33-98d7-e97742749210\") " pod="openshift-machine-config-operator/machine-config-server-qht6p" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624484 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffc60626-0001-47d7-a0f7-46450b095e84-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b5sxx\" (UID: \"ffc60626-0001-47d7-a0f7-46450b095e84\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624499 4852 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4lzf\" (UniqueName: \"kubernetes.io/projected/65011ad9-c976-4071-b7eb-81f7e3c4975e-kube-api-access-x4lzf\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624514 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/20a3acb3-28ed-4ce0-a281-b9006b6e7fe6-signing-cabundle\") pod \"service-ca-9c57cc56f-4k28k\" (UID: \"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6\") " pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624538 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/65011ad9-c976-4071-b7eb-81f7e3c4975e-images\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624554 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6f1d808e-f29f-40db-a8c6-f2af78c6ae2a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mbbfj\" (UID: \"6f1d808e-f29f-40db-a8c6-f2af78c6ae2a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624571 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j7qf\" (UniqueName: \"kubernetes.io/projected/6f1d808e-f29f-40db-a8c6-f2af78c6ae2a-kube-api-access-4j7qf\") pod \"multus-admission-controller-857f4d67dd-mbbfj\" (UID: \"6f1d808e-f29f-40db-a8c6-f2af78c6ae2a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624587 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/08729539-55f5-4d1c-a952-9af42aa77b9c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-v6j29\" (UID: \"08729539-55f5-4d1c-a952-9af42aa77b9c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624610 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/50c50fc4-d5f4-4d1f-96d1-1e510221cc28-metrics-tls\") pod \"dns-default-mwxfn\" (UID: \"50c50fc4-d5f4-4d1f-96d1-1e510221cc28\") " pod="openshift-dns/dns-default-mwxfn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624623 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c7b107d8-6494-4f33-98d7-e97742749210-node-bootstrap-token\") pod \"machine-config-server-qht6p\" (UID: \"c7b107d8-6494-4f33-98d7-e97742749210\") " pod="openshift-machine-config-operator/machine-config-server-qht6p" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624643 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/80abad70-6989-429d-9a1b-80163a0fdc2b-config\") pod \"service-ca-operator-777779d784-t6knk\" (UID: \"80abad70-6989-429d-9a1b-80163a0fdc2b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624660 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz66h\" (UniqueName: \"kubernetes.io/projected/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-kube-api-access-kz66h\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624673 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/93085d90-0464-4c0c-9908-237d7cb85b24-metrics-certs\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624689 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlrpk\" (UniqueName: \"kubernetes.io/projected/65c26ddd-9a26-4b9c-b3fa-74827d33872a-kube-api-access-rlrpk\") pod \"marketplace-operator-79b997595-zfljp\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624703 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b0d0b09-9413-4609-84a5-75665474cde3-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-hctzd\" (UID: \"4b0d0b09-9413-4609-84a5-75665474cde3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624719 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-registration-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624734 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7b65e8c-0f7e-441d-9183-2090247908eb-config-volume\") pod \"collect-profiles-29410320-lm4qd\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624756 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jh6mx\" (UniqueName: \"kubernetes.io/projected/c7b107d8-6494-4f33-98d7-e97742749210-kube-api-access-jh6mx\") pod \"machine-config-server-qht6p\" (UID: \"c7b107d8-6494-4f33-98d7-e97742749210\") " pod="openshift-machine-config-operator/machine-config-server-qht6p" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624771 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/53099b1c-1857-463d-922b-e1d61ccda3fd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-k4r8z\" (UID: 
\"53099b1c-1857-463d-922b-e1d61ccda3fd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624789 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnkdg\" (UniqueName: \"kubernetes.io/projected/1e82e35b-0155-4532-867b-b87d8c789575-kube-api-access-mnkdg\") pod \"ingress-canary-68zfl\" (UID: \"1e82e35b-0155-4532-867b-b87d8c789575\") " pod="openshift-ingress-canary/ingress-canary-68zfl" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624805 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/206eb8d6-f4cd-4410-b6e4-a0452c21a0f1-proxy-tls\") pod \"machine-config-controller-84d6567774-7khwn\" (UID: \"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624829 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/93085d90-0464-4c0c-9908-237d7cb85b24-service-ca-bundle\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624844 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p6xx\" (UniqueName: \"kubernetes.io/projected/4b0d0b09-9413-4609-84a5-75665474cde3-kube-api-access-5p6xx\") pod \"kube-storage-version-migrator-operator-b67b599dd-hctzd\" (UID: \"4b0d0b09-9413-4609-84a5-75665474cde3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624861 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/20a3acb3-28ed-4ce0-a281-b9006b6e7fe6-signing-key\") pod \"service-ca-9c57cc56f-4k28k\" (UID: \"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6\") " pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624878 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zfljp\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624903 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv2sk\" (UniqueName: \"kubernetes.io/projected/50c50fc4-d5f4-4d1f-96d1-1e510221cc28-kube-api-access-cv2sk\") pod \"dns-default-mwxfn\" (UID: \"50c50fc4-d5f4-4d1f-96d1-1e510221cc28\") " pod="openshift-dns/dns-default-mwxfn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624918 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7b65e8c-0f7e-441d-9183-2090247908eb-secret-volume\") pod \"collect-profiles-29410320-lm4qd\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624931 4852 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2440e229-6826-42f6-8e47-7026c3963cb2-apiservice-cert\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624947 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/206eb8d6-f4cd-4410-b6e4-a0452c21a0f1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-7khwn\" (UID: \"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624963 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1e82e35b-0155-4532-867b-b87d8c789575-cert\") pod \"ingress-canary-68zfl\" (UID: \"1e82e35b-0155-4532-867b-b87d8c789575\") " pod="openshift-ingress-canary/ingress-canary-68zfl" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624978 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/93085d90-0464-4c0c-9908-237d7cb85b24-default-certificate\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.624994 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/93085d90-0464-4c0c-9908-237d7cb85b24-stats-auth\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625009 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zs5g\" (UniqueName: \"kubernetes.io/projected/53099b1c-1857-463d-922b-e1d61ccda3fd-kube-api-access-2zs5g\") pod \"olm-operator-6b444d44fb-k4r8z\" (UID: \"53099b1c-1857-463d-922b-e1d61ccda3fd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625026 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvktw\" (UniqueName: \"kubernetes.io/projected/54c5c0b6-21b4-40de-b252-b8ae4b6da45e-kube-api-access-mvktw\") pod \"dns-operator-744455d44c-t5h7h\" (UID: \"54c5c0b6-21b4-40de-b252-b8ae4b6da45e\") " pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625053 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-socket-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625069 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7e3d25d9-8048-4adf-9294-e090ab509fdd-srv-cert\") pod \"catalog-operator-68c6474976-r2rjr\" (UID: \"7e3d25d9-8048-4adf-9294-e090ab509fdd\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625086 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/50c50fc4-d5f4-4d1f-96d1-1e510221cc28-config-volume\") pod \"dns-default-mwxfn\" (UID: \"50c50fc4-d5f4-4d1f-96d1-1e510221cc28\") " pod="openshift-dns/dns-default-mwxfn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625121 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg76t\" (UniqueName: \"kubernetes.io/projected/4458fbe8-b293-43d8-8dd3-3a443b22191f-kube-api-access-sg76t\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625139 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625156 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0d0b09-9413-4609-84a5-75665474cde3-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-hctzd\" (UID: \"4b0d0b09-9413-4609-84a5-75665474cde3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625181 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctcpz\" (UniqueName: \"kubernetes.io/projected/20a3acb3-28ed-4ce0-a281-b9006b6e7fe6-kube-api-access-ctcpz\") pod \"service-ca-9c57cc56f-4k28k\" (UID: \"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6\") " pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625204 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2mp5\" (UniqueName: \"kubernetes.io/projected/21a47c79-31fd-4082-86f4-8b902af7b09d-kube-api-access-d2mp5\") pod \"package-server-manager-789f6589d5-56wb9\" (UID: \"21a47c79-31fd-4082-86f4-8b902af7b09d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625220 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/53099b1c-1857-463d-922b-e1d61ccda3fd-srv-cert\") pod \"olm-operator-6b444d44fb-k4r8z\" (UID: \"53099b1c-1857-463d-922b-e1d61ccda3fd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625241 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d760681-acd7-4c14-9510-74a753796ac9-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cf7w4\" (UID: \"1d760681-acd7-4c14-9510-74a753796ac9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625262 4852 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-trusted-ca\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625288 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80abad70-6989-429d-9a1b-80163a0fdc2b-serving-cert\") pod \"service-ca-operator-777779d784-t6knk\" (UID: \"80abad70-6989-429d-9a1b-80163a0fdc2b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625303 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-metrics-tls\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625323 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/54c5c0b6-21b4-40de-b252-b8ae4b6da45e-metrics-tls\") pod \"dns-operator-744455d44c-t5h7h\" (UID: \"54c5c0b6-21b4-40de-b252-b8ae4b6da45e\") " pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h" Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625339 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zfljp\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:06:57 crc kubenswrapper[4852]: E1201 20:06:57.625425 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.125377274 +0000 UTC m=+138.052458691 (durationBeforeRetry 500ms). 
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625478 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-registration-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625499 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/65011ad9-c976-4071-b7eb-81f7e3c4975e-proxy-tls\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625526 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffc60626-0001-47d7-a0f7-46450b095e84-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b5sxx\" (UID: \"ffc60626-0001-47d7-a0f7-46450b095e84\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625547 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2440e229-6826-42f6-8e47-7026c3963cb2-webhook-cert\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625571 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbvzg\" (UniqueName: \"kubernetes.io/projected/80abad70-6989-429d-9a1b-80163a0fdc2b-kube-api-access-hbvzg\") pod \"service-ca-operator-777779d784-t6knk\" (UID: \"80abad70-6989-429d-9a1b-80163a0fdc2b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625593 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnhgw\" (UniqueName: \"kubernetes.io/projected/2440e229-6826-42f6-8e47-7026c3963cb2-kube-api-access-qnhgw\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625610 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7e3d25d9-8048-4adf-9294-e090ab509fdd-profile-collector-cert\") pod \"catalog-operator-68c6474976-r2rjr\" (UID: \"7e3d25d9-8048-4adf-9294-e090ab509fdd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625630 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d760681-acd7-4c14-9510-74a753796ac9-config\") pod \"kube-apiserver-operator-766d6c64bb-cf7w4\" (UID: \"1d760681-acd7-4c14-9510-74a753796ac9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625647 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-plugins-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625680 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwjmw\" (UniqueName: \"kubernetes.io/projected/7e3d25d9-8048-4adf-9294-e090ab509fdd-kube-api-access-jwjmw\") pod \"catalog-operator-68c6474976-r2rjr\" (UID: \"7e3d25d9-8048-4adf-9294-e090ab509fdd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625709 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625729 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d760681-acd7-4c14-9510-74a753796ac9-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cf7w4\" (UID: \"1d760681-acd7-4c14-9510-74a753796ac9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625746 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-mountpoint-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625763 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffc60626-0001-47d7-a0f7-46450b095e84-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b5sxx\" (UID: \"ffc60626-0001-47d7-a0f7-46450b095e84\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625787 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnzt7\" (UniqueName: \"kubernetes.io/projected/b7b65e8c-0f7e-441d-9183-2090247908eb-kube-api-access-bnzt7\") pod \"collect-profiles-29410320-lm4qd\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625804 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp2hs\" (UniqueName: \"kubernetes.io/projected/08729539-55f5-4d1c-a952-9af42aa77b9c-kube-api-access-bp2hs\") pod \"control-plane-machine-set-operator-78cbb6b69f-v6j29\" (UID: \"08729539-55f5-4d1c-a952-9af42aa77b9c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.625819 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-csi-data-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.626010 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-csi-data-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.626073 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/65011ad9-c976-4071-b7eb-81f7e3c4975e-images\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.627215 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/2440e229-6826-42f6-8e47-7026c3963cb2-tmpfs\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.627428 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0d0b09-9413-4609-84a5-75665474cde3-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-hctzd\" (UID: \"4b0d0b09-9413-4609-84a5-75665474cde3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.627656 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-socket-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.627835 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/65011ad9-c976-4071-b7eb-81f7e3c4975e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.628010 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/206eb8d6-f4cd-4410-b6e4-a0452c21a0f1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-7khwn\" (UID: \"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.628804 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7b65e8c-0f7e-441d-9183-2090247908eb-config-volume\") pod \"collect-profiles-29410320-lm4qd\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.629111 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zfljp\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " pod="openshift-marketplace/marketplace-operator-79b997595-zfljp"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.629711 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zfljp\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " pod="openshift-marketplace/marketplace-operator-79b997595-zfljp"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.630690 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6f1d808e-f29f-40db-a8c6-f2af78c6ae2a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mbbfj\" (UID: \"6f1d808e-f29f-40db-a8c6-f2af78c6ae2a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.630739 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/20a3acb3-28ed-4ce0-a281-b9006b6e7fe6-signing-key\") pod \"service-ca-9c57cc56f-4k28k\" (UID: \"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6\") " pod="openshift-service-ca/service-ca-9c57cc56f-4k28k"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.631233 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/93085d90-0464-4c0c-9908-237d7cb85b24-service-ca-bundle\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.632129 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/50c50fc4-d5f4-4d1f-96d1-1e510221cc28-config-volume\") pod \"dns-default-mwxfn\" (UID: \"50c50fc4-d5f4-4d1f-96d1-1e510221cc28\") " pod="openshift-dns/dns-default-mwxfn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.632334 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/21a47c79-31fd-4082-86f4-8b902af7b09d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-56wb9\" (UID: \"21a47c79-31fd-4082-86f4-8b902af7b09d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.632507 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80abad70-6989-429d-9a1b-80163a0fdc2b-config\") pod \"service-ca-operator-777779d784-t6knk\" (UID: \"80abad70-6989-429d-9a1b-80163a0fdc2b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.632757 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2440e229-6826-42f6-8e47-7026c3963cb2-apiservice-cert\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.634094 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d760681-acd7-4c14-9510-74a753796ac9-config\") pod \"kube-apiserver-operator-766d6c64bb-cf7w4\" (UID: \"1d760681-acd7-4c14-9510-74a753796ac9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.634152 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-plugins-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn"
Dec 01 20:06:57 crc kubenswrapper[4852]: E1201 20:06:57.634441 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.134427415 +0000 UTC m=+138.061508832 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.634942 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c7b107d8-6494-4f33-98d7-e97742749210-node-bootstrap-token\") pod \"machine-config-server-qht6p\" (UID: \"c7b107d8-6494-4f33-98d7-e97742749210\") " pod="openshift-machine-config-operator/machine-config-server-qht6p"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.635467 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7b65e8c-0f7e-441d-9183-2090247908eb-secret-volume\") pod \"collect-profiles-29410320-lm4qd\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.635734 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4458fbe8-b293-43d8-8dd3-3a443b22191f-mountpoint-dir\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.636022 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffc60626-0001-47d7-a0f7-46450b095e84-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b5sxx\" (UID: \"ffc60626-0001-47d7-a0f7-46450b095e84\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.636382 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/20a3acb3-28ed-4ce0-a281-b9006b6e7fe6-signing-cabundle\") pod \"service-ca-9c57cc56f-4k28k\" (UID: \"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6\") " pod="openshift-service-ca/service-ca-9c57cc56f-4k28k"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.637148 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-trusted-ca\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.638794 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/65011ad9-c976-4071-b7eb-81f7e3c4975e-proxy-tls\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.639197 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffc60626-0001-47d7-a0f7-46450b095e84-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b5sxx\" (UID: \"ffc60626-0001-47d7-a0f7-46450b095e84\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.640906 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d760681-acd7-4c14-9510-74a753796ac9-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cf7w4\" (UID: \"1d760681-acd7-4c14-9510-74a753796ac9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.640917 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/50c50fc4-d5f4-4d1f-96d1-1e510221cc28-metrics-tls\") pod \"dns-default-mwxfn\" (UID: \"50c50fc4-d5f4-4d1f-96d1-1e510221cc28\") " pod="openshift-dns/dns-default-mwxfn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.641162 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1e82e35b-0155-4532-867b-b87d8c789575-cert\") pod \"ingress-canary-68zfl\" (UID: \"1e82e35b-0155-4532-867b-b87d8c789575\") " pod="openshift-ingress-canary/ingress-canary-68zfl"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.641256 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c7b107d8-6494-4f33-98d7-e97742749210-certs\") pod \"machine-config-server-qht6p\" (UID: \"c7b107d8-6494-4f33-98d7-e97742749210\") " pod="openshift-machine-config-operator/machine-config-server-qht6p"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.641846 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/08729539-55f5-4d1c-a952-9af42aa77b9c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-v6j29\" (UID: \"08729539-55f5-4d1c-a952-9af42aa77b9c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.642125 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-glnwk"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.644670 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-metrics-tls\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.646961 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/93085d90-0464-4c0c-9908-237d7cb85b24-default-certificate\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.648519 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80abad70-6989-429d-9a1b-80163a0fdc2b-serving-cert\") pod \"service-ca-operator-777779d784-t6knk\" (UID: \"80abad70-6989-429d-9a1b-80163a0fdc2b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.648646 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/53099b1c-1857-463d-922b-e1d61ccda3fd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-k4r8z\" (UID: \"53099b1c-1857-463d-922b-e1d61ccda3fd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.648728 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/54c5c0b6-21b4-40de-b252-b8ae4b6da45e-metrics-tls\") pod \"dns-operator-744455d44c-t5h7h\" (UID: \"54c5c0b6-21b4-40de-b252-b8ae4b6da45e\") " pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.650656 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/93085d90-0464-4c0c-9908-237d7cb85b24-stats-auth\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.651357 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/206eb8d6-f4cd-4410-b6e4-a0452c21a0f1-proxy-tls\") pod \"machine-config-controller-84d6567774-7khwn\" (UID: \"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.652742 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7e3d25d9-8048-4adf-9294-e090ab509fdd-profile-collector-cert\") pod \"catalog-operator-68c6474976-r2rjr\" (UID: \"7e3d25d9-8048-4adf-9294-e090ab509fdd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.652786 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b0d0b09-9413-4609-84a5-75665474cde3-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-hctzd\" (UID: \"4b0d0b09-9413-4609-84a5-75665474cde3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.653583 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/53099b1c-1857-463d-922b-e1d61ccda3fd-srv-cert\") pod \"olm-operator-6b444d44fb-k4r8z\" (UID: \"53099b1c-1857-463d-922b-e1d61ccda3fd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.653628 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7e3d25d9-8048-4adf-9294-e090ab509fdd-srv-cert\") pod \"catalog-operator-68c6474976-r2rjr\" (UID: \"7e3d25d9-8048-4adf-9294-e090ab509fdd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.653637 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/93085d90-0464-4c0c-9908-237d7cb85b24-metrics-certs\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.660667 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj662\" (UniqueName: \"kubernetes.io/projected/e8955ebb-0525-480b-8ab4-f2b9d808b69f-kube-api-access-kj662\") pod \"console-operator-58897d9998-wnjpd\" (UID: \"e8955ebb-0525-480b-8ab4-f2b9d808b69f\") " pod="openshift-console-operator/console-operator-58897d9998-wnjpd"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.660882 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2440e229-6826-42f6-8e47-7026c3963cb2-webhook-cert\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.662553 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-bound-sa-token\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.681656 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhnlw\" (UniqueName: \"kubernetes.io/projected/54eaf19c-13a7-4d25-a623-a6e4eaa236d7-kube-api-access-jhnlw\") pod \"openshift-controller-manager-operator-756b6f6bc6-b29vn\" (UID: \"54eaf19c-13a7-4d25-a623-a6e4eaa236d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.699127 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghtx8\" (UniqueName: \"kubernetes.io/projected/69d514ec-24fc-4900-a812-fa1ca252b98f-kube-api-access-ghtx8\") pod \"cluster-samples-operator-665b6dd947-5st5z\" (UID: \"69d514ec-24fc-4900-a812-fa1ca252b98f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.709116 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.722739 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kl85\" (UniqueName: \"kubernetes.io/projected/a55c2bbe-e79b-41f2-9349-8d96592516d1-kube-api-access-8kl85\") pod \"apiserver-7bbb656c7d-9hnnb\" (UID: \"a55c2bbe-e79b-41f2-9349-8d96592516d1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.727193 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 20:06:57 crc kubenswrapper[4852]: E1201 20:06:57.727717 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.227677166 +0000 UTC m=+138.154758583 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.739734 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jm8n\" (UniqueName: \"kubernetes.io/projected/57147412-f20a-4dbd-9337-4839c074baaf-kube-api-access-6jm8n\") pod \"openshift-config-operator-7777fb866f-zxqr5\" (UID: \"57147412-f20a-4dbd-9337-4839c074baaf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.763820 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5svcx\" (UniqueName: \"kubernetes.io/projected/c4211bad-89db-417b-acc1-5097f9b656cf-kube-api-access-5svcx\") pod \"migrator-59844c95c7-fs2n7\" (UID: \"c4211bad-89db-417b-acc1-5097f9b656cf\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.767520 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.774774 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.805353 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fwvm\" (UniqueName: \"kubernetes.io/projected/ee1b682a-9dad-4a65-9f73-9bf26af43729-kube-api-access-8fwvm\") pod \"openshift-apiserver-operator-796bbdcf4f-b8cql\" (UID: \"ee1b682a-9dad-4a65-9f73-9bf26af43729\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.825174 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbwsw\" (UniqueName: \"kubernetes.io/projected/ff119331-8296-43c6-abd8-2da90ba021a6-kube-api-access-vbwsw\") pod \"oauth-openshift-558db77b4-n7xgz\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.829931 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql"
Dec 01 20:06:57 crc kubenswrapper[4852]: E1201 20:06:57.830485 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.330469575 +0000 UTC m=+138.257550992 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.840350 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sfqq\" (UniqueName: \"kubernetes.io/projected/1f6e0af9-aa12-497b-b39d-562dda0b4127-kube-api-access-8sfqq\") pod \"cluster-image-registry-operator-dc59b4c8b-ldq95\" (UID: \"1f6e0af9-aa12-497b-b39d-562dda0b4127\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.849521 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.858003 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdp2w\" (UniqueName: \"kubernetes.io/projected/84b94460-3d16-466f-bb73-b65f633d16d7-kube-api-access-gdp2w\") pod \"etcd-operator-b45778765-ts9nz\" (UID: \"84b94460-3d16-466f-bb73-b65f633d16d7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.868809 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-glnwk"]
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.886907 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z22r7\" (UniqueName: \"kubernetes.io/projected/8f2fd72c-c9d0-400a-9658-b1a89365a32e-kube-api-access-z22r7\") pod \"apiserver-76f77b778f-q48jr\" (UID: \"8f2fd72c-c9d0-400a-9658-b1a89365a32e\") " pod="openshift-apiserver/apiserver-76f77b778f-q48jr"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.910577 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27x84\" (UniqueName: \"kubernetes.io/projected/79fbfc7e-85e3-4d27-8aec-c0157592c888-kube-api-access-27x84\") pod \"downloads-7954f5f757-nqllf\" (UID: \"79fbfc7e-85e3-4d27-8aec-c0157592c888\") " pod="openshift-console/downloads-7954f5f757-nqllf"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.917886 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-q48jr"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.923705 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.925424 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qth4k\" (UniqueName: \"kubernetes.io/projected/878a6eea-f252-41c4-b674-2c0e4ade05f0-kube-api-access-qth4k\") pod \"machine-approver-56656f9798-xnrtm\" (UID: \"878a6eea-f252-41c4-b674-2c0e4ade05f0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.930938 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn"]
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.931430 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 20:06:57 crc kubenswrapper[4852]: E1201 20:06:57.931904 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.43188793 +0000 UTC m=+138.358969347 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.933004 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.947785 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-wnjpd"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.962017 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.966016 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffc60626-0001-47d7-a0f7-46450b095e84-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b5sxx\" (UID: \"ffc60626-0001-47d7-a0f7-46450b095e84\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.976838 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.984106 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk66z\" (UniqueName: \"kubernetes.io/projected/206eb8d6-f4cd-4410-b6e4-a0452c21a0f1-kube-api-access-xk66z\") pod \"machine-config-controller-84d6567774-7khwn\" (UID: \"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn"
Dec 01 20:06:57 crc kubenswrapper[4852]: I1201 20:06:57.986620 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.000908 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-nqllf"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.013231 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zs5g\" (UniqueName: \"kubernetes.io/projected/53099b1c-1857-463d-922b-e1d61ccda3fd-kube-api-access-2zs5g\") pod \"olm-operator-6b444d44fb-k4r8z\" (UID: \"53099b1c-1857-463d-922b-e1d61ccda3fd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.017290 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.027088 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" event={"ID":"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b","Type":"ContainerStarted","Data":"c322d09b3c17282bd5b9d59cd49f4e58ecbdf05bc6e7e7e23eb6d8f09614c0d4"}
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.028800 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvktw\" (UniqueName: \"kubernetes.io/projected/54c5c0b6-21b4-40de-b252-b8ae4b6da45e-kube-api-access-mvktw\") pod \"dns-operator-744455d44c-t5h7h\" (UID: \"54c5c0b6-21b4-40de-b252-b8ae4b6da45e\") " pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.036678 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql"
Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.037048 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.537035754 +0000 UTC m=+138.464117171 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.037439 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.038240 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.039524 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4lzf\" (UniqueName: \"kubernetes.io/projected/65011ad9-c976-4071-b7eb-81f7e3c4975e-kube-api-access-x4lzf\") pod \"machine-config-operator-74547568cd-x8r66\" (UID: \"65011ad9-c976-4071-b7eb-81f7e3c4975e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.045435 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.059262 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gxhz\" (UniqueName: \"kubernetes.io/projected/93085d90-0464-4c0c-9908-237d7cb85b24-kube-api-access-2gxhz\") pod \"router-default-5444994796-pvvl9\" (UID: \"93085d90-0464-4c0c-9908-237d7cb85b24\") " pod="openshift-ingress/router-default-5444994796-pvvl9"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.072078 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/18202e52-4e9d-49b2-9214-8f9e55684bbb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8wlj7\" (UID: \"18202e52-4e9d-49b2-9214-8f9e55684bbb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7"
Dec 01 20:06:58 crc kubenswrapper[4852]: W1201 20:06:58.075509 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54eaf19c_13a7_4d25_a623_a6e4eaa236d7.slice/crio-0035f60437ab3cc0422f5138df693262ba30b7019ccb21e65bedfc459e5ce778 WatchSource:0}: Error finding container 0035f60437ab3cc0422f5138df693262ba30b7019ccb21e65bedfc459e5ce778: Status 404 returned error can't find the container with id 0035f60437ab3cc0422f5138df693262ba30b7019ccb21e65bedfc459e5ce778
Dec 01 20:06:58 crc kubenswrapper[4852]: W1201 20:06:58.076235 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab102fcf_71d9_40fc_9b9d_79b697e7864c.slice/crio-572753ab8cdd25c65fdea496572b08ac646fbc60aa3a06b32e8505a56960b8cd WatchSource:0}: Error finding container 572753ab8cdd25c65fdea496572b08ac646fbc60aa3a06b32e8505a56960b8cd: Status 404 returned error can't find the container with id 572753ab8cdd25c65fdea496572b08ac646fbc60aa3a06b32e8505a56960b8cd
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.077895 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.086769 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv2sk\" (UniqueName: \"kubernetes.io/projected/50c50fc4-d5f4-4d1f-96d1-1e510221cc28-kube-api-access-cv2sk\") pod \"dns-default-mwxfn\" (UID: \"50c50fc4-d5f4-4d1f-96d1-1e510221cc28\") " pod="openshift-dns/dns-default-mwxfn"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.093086 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-pvvl9"
Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.101377 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.102178 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnkdg\" (UniqueName: \"kubernetes.io/projected/1e82e35b-0155-4532-867b-b87d8c789575-kube-api-access-mnkdg\") pod \"ingress-canary-68zfl\" (UID: \"1e82e35b-0155-4532-867b-b87d8c789575\") " pod="openshift-ingress-canary/ingress-canary-68zfl" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.126929 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jh6mx\" (UniqueName: \"kubernetes.io/projected/c7b107d8-6494-4f33-98d7-e97742749210-kube-api-access-jh6mx\") pod \"machine-config-server-qht6p\" (UID: \"c7b107d8-6494-4f33-98d7-e97742749210\") " pod="openshift-machine-config-operator/machine-config-server-qht6p" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.136212 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.137446 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.137592 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.63756679 +0000 UTC m=+138.564648217 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.137896 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.138406 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.638385746 +0000 UTC m=+138.565467163 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.145812 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctcpz\" (UniqueName: \"kubernetes.io/projected/20a3acb3-28ed-4ce0-a281-b9006b6e7fe6-kube-api-access-ctcpz\") pod \"service-ca-9c57cc56f-4k28k\" (UID: \"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6\") " pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.164969 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2mp5\" (UniqueName: \"kubernetes.io/projected/21a47c79-31fd-4082-86f4-8b902af7b09d-kube-api-access-d2mp5\") pod \"package-server-manager-789f6589d5-56wb9\" (UID: \"21a47c79-31fd-4082-86f4-8b902af7b09d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.173309 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-qht6p" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.175401 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.193310 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p6xx\" (UniqueName: \"kubernetes.io/projected/4b0d0b09-9413-4609-84a5-75665474cde3-kube-api-access-5p6xx\") pod \"kube-storage-version-migrator-operator-b67b599dd-hctzd\" (UID: \"4b0d0b09-9413-4609-84a5-75665474cde3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.200600 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg76t\" (UniqueName: \"kubernetes.io/projected/4458fbe8-b293-43d8-8dd3-3a443b22191f-kube-api-access-sg76t\") pod \"csi-hostpathplugin-7rzbn\" (UID: \"4458fbe8-b293-43d8-8dd3-3a443b22191f\") " pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.202156 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-mwxfn" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.208240 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-68zfl" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.223278 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.238960 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.239109 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.739066967 +0000 UTC m=+138.666148384 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.239227 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.239579 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.739567713 +0000 UTC m=+138.666649130 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.243634 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz66h\" (UniqueName: \"kubernetes.io/projected/020cd112-6df2-4a1b-b6b9-0f01c5255fb1-kube-api-access-kz66h\") pod \"ingress-operator-5b745b69d9-t4mgg\" (UID: \"020cd112-6df2-4a1b-b6b9-0f01c5255fb1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.278530 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbvzg\" (UniqueName: \"kubernetes.io/projected/80abad70-6989-429d-9a1b-80163a0fdc2b-kube-api-access-hbvzg\") pod \"service-ca-operator-777779d784-t6knk\" (UID: \"80abad70-6989-429d-9a1b-80163a0fdc2b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.291590 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnhgw\" (UniqueName: \"kubernetes.io/projected/2440e229-6826-42f6-8e47-7026c3963cb2-kube-api-access-qnhgw\") pod \"packageserver-d55dfcdfc-fbhtp\" (UID: \"2440e229-6826-42f6-8e47-7026c3963cb2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.305915 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j7qf\" (UniqueName: \"kubernetes.io/projected/6f1d808e-f29f-40db-a8c6-f2af78c6ae2a-kube-api-access-4j7qf\") pod \"multus-admission-controller-857f4d67dd-mbbfj\" (UID: \"6f1d808e-f29f-40db-a8c6-f2af78c6ae2a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.323419 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.331497 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.341743 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d760681-acd7-4c14-9510-74a753796ac9-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cf7w4\" (UID: \"1d760681-acd7-4c14-9510-74a753796ac9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.354579 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.355389 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.356922 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.85689459 +0000 UTC m=+138.783976007 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.363155 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.386955 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.399601 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwjmw\" (UniqueName: \"kubernetes.io/projected/7e3d25d9-8048-4adf-9294-e090ab509fdd-kube-api-access-jwjmw\") pod \"catalog-operator-68c6474976-r2rjr\" (UID: \"7e3d25d9-8048-4adf-9294-e090ab509fdd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.401653 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnzt7\" (UniqueName: \"kubernetes.io/projected/b7b65e8c-0f7e-441d-9183-2090247908eb-kube-api-access-bnzt7\") pod \"collect-profiles-29410320-lm4qd\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.402347 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp2hs\" (UniqueName: \"kubernetes.io/projected/08729539-55f5-4d1c-a952-9af42aa77b9c-kube-api-access-bp2hs\") pod \"control-plane-machine-set-operator-78cbb6b69f-v6j29\" (UID: \"08729539-55f5-4d1c-a952-9af42aa77b9c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.404439 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlrpk\" (UniqueName: \"kubernetes.io/projected/65c26ddd-9a26-4b9c-b3fa-74827d33872a-kube-api-access-rlrpk\") pod \"marketplace-operator-79b997595-zfljp\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:06:58 crc 
kubenswrapper[4852]: I1201 20:06:58.405296 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb"] Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.412822 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.420514 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.428841 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.452331 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.453928 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.457046 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.457410 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:58.957396295 +0000 UTC m=+138.884477712 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.457600 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.466045 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.493144 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.559334 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.560676 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.060650348 +0000 UTC m=+138.987731765 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.564996 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.565504 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.065482373 +0000 UTC m=+138.992563790 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.667096 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.667597 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.16757128 +0000 UTC m=+139.094652697 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.667807 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.668093 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.168079767 +0000 UTC m=+139.095161184 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.668417 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29" Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.770406 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.770891 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.270861395 +0000 UTC m=+139.197942812 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.872130 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.872657 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.37264083 +0000 UTC m=+139.299722247 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:58 crc kubenswrapper[4852]: I1201 20:06:58.974059 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:58 crc kubenswrapper[4852]: E1201 20:06:58.974439 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.474423307 +0000 UTC m=+139.401504724 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.039073 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-qht6p" event={"ID":"c7b107d8-6494-4f33-98d7-e97742749210","Type":"ContainerStarted","Data":"090069058a40aae6640d42513b89185b3ebb8ba315684c16534e5fd5f210374e"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.039118 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-qht6p" event={"ID":"c7b107d8-6494-4f33-98d7-e97742749210","Type":"ContainerStarted","Data":"a5e2f93c2d84a5cccd25a8fd062df3f5443f66b06ecb5995ca4ed881732a6621"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.051724 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-pvvl9" event={"ID":"93085d90-0464-4c0c-9908-237d7cb85b24","Type":"ContainerStarted","Data":"ae787a922f24b7b812cbd96d298de4c9167a652e559147f40f6422a4d837b408"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.051767 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-pvvl9" event={"ID":"93085d90-0464-4c0c-9908-237d7cb85b24","Type":"ContainerStarted","Data":"92a46193f1f407e490f29e991f071e37d942b6c14df958efaad9c1b36a12c899"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.057737 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" event={"ID":"878a6eea-f252-41c4-b674-2c0e4ade05f0","Type":"ContainerStarted","Data":"7d5d770b79017e14a0cc756365b523374a33edf8c6d8e30bb36b78de5538401d"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.076706 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:59 crc kubenswrapper[4852]: E1201 20:06:59.078126 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.578092714 +0000 UTC m=+139.505174131 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.096930 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.105419 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" event={"ID":"54eaf19c-13a7-4d25-a623-a6e4eaa236d7","Type":"ContainerStarted","Data":"5e8d8538c518b3a425c153ae164cf67b2525f7d4cf032b929f1fb68f0ac86ba5"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.105504 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" event={"ID":"54eaf19c-13a7-4d25-a623-a6e4eaa236d7","Type":"ContainerStarted","Data":"0035f60437ab3cc0422f5138df693262ba30b7019ccb21e65bedfc459e5ce778"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.127708 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-glnwk" event={"ID":"ab102fcf-71d9-40fc-9b9d-79b697e7864c","Type":"ContainerStarted","Data":"118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.127762 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-glnwk" event={"ID":"ab102fcf-71d9-40fc-9b9d-79b697e7864c","Type":"ContainerStarted","Data":"572753ab8cdd25c65fdea496572b08ac646fbc60aa3a06b32e8505a56960b8cd"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.146471 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" event={"ID":"a55c2bbe-e79b-41f2-9349-8d96592516d1","Type":"ContainerStarted","Data":"0214aaaf023658a58ff39768c23002362c8dfeec01aab2570c721187e84d9683"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.173202 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" event={"ID":"fbd0c143-fbca-4526-bfa9-2ea7c0b9f96b","Type":"ContainerStarted","Data":"334bddcbb36ca3e44d38ab4cfb0975cc4afd97ce53cc558a29756ab373915abf"} Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.177908 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:59 crc kubenswrapper[4852]: E1201 20:06:59.179537 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.679516358 +0000 UTC m=+139.606597785 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.220403 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b29vn" podStartSLOduration=119.220385414 podStartE2EDuration="1m59.220385414s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:59.220238759 +0000 UTC m=+139.147320186" watchObservedRunningTime="2025-12-01 20:06:59.220385414 +0000 UTC m=+139.147466831" Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.253987 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd"] Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.280045 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:59 crc kubenswrapper[4852]: E1201 20:06:59.280818 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.780798709 +0000 UTC m=+139.707880146 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.305510 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:06:59 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:06:59 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:06:59 crc kubenswrapper[4852]: healthz check failed Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.305569 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.358462 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" podStartSLOduration=119.358421877 podStartE2EDuration="1m59.358421877s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:59.357305631 +0000 UTC m=+139.284387048" watchObservedRunningTime="2025-12-01 20:06:59.358421877 +0000 UTC m=+139.285503294" Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.395775 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:59 crc kubenswrapper[4852]: E1201 20:06:59.396463 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.896393829 +0000 UTC m=+139.823475256 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.459798 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-pvvl9" podStartSLOduration=119.45978397 podStartE2EDuration="1m59.45978397s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:59.457913129 +0000 UTC m=+139.384994546" watchObservedRunningTime="2025-12-01 20:06:59.45978397 +0000 UTC m=+139.386865387" Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.497358 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:59 crc kubenswrapper[4852]: E1201 20:06:59.497742 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:06:59.997725781 +0000 UTC m=+139.924807198 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.598884 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:59 crc kubenswrapper[4852]: E1201 20:06:59.599063 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.099041322 +0000 UTC m=+140.026122739 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.599190 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:59 crc kubenswrapper[4852]: E1201 20:06:59.599508 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.099500977 +0000 UTC m=+140.026582384 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.678267 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-glnwk" podStartSLOduration=119.67822945099999 podStartE2EDuration="1m59.678229451s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:59.643292266 +0000 UTC m=+139.570373693" watchObservedRunningTime="2025-12-01 20:06:59.678229451 +0000 UTC m=+139.605310868" Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.705093 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:59 crc kubenswrapper[4852]: E1201 20:06:59.705615 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.205596582 +0000 UTC m=+140.132677999 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.808066 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:06:59 crc kubenswrapper[4852]: E1201 20:06:59.808466 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.308434472 +0000 UTC m=+140.235515889 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.897439 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-tb6ll" podStartSLOduration=119.897421716 podStartE2EDuration="1m59.897421716s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:59.861876112 +0000 UTC m=+139.788957529" watchObservedRunningTime="2025-12-01 20:06:59.897421716 +0000 UTC m=+139.824503133" Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.911127 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:06:59 crc kubenswrapper[4852]: E1201 20:06:59.911517 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.411502669 +0000 UTC m=+140.338584086 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.979597 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-qht6p" podStartSLOduration=4.97956829 podStartE2EDuration="4.97956829s" podCreationTimestamp="2025-12-01 20:06:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:06:59.977144022 +0000 UTC m=+139.904225429" watchObservedRunningTime="2025-12-01 20:06:59.97956829 +0000 UTC m=+139.906649707" Dec 01 20:06:59 crc kubenswrapper[4852]: I1201 20:06:59.991054 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.020332 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.021079 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.521061976 +0000 UTC m=+140.448143383 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.039521 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.042244 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-nqllf"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.044307 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.047900 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-t5h7h"] Dec 01 20:07:00 crc kubenswrapper[4852]: W1201 20:07:00.053017 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54c5c0b6_21b4_40de_b252_b8ae4b6da45e.slice/crio-1a0f6b025450bfebd5f805a5c9822c91f01fb82ce68f93429bae5ccdcc6e450f WatchSource:0}: Error finding container 1a0f6b025450bfebd5f805a5c9822c91f01fb82ce68f93429bae5ccdcc6e450f: Status 404 returned error can't find the container with id 1a0f6b025450bfebd5f805a5c9822c91f01fb82ce68f93429bae5ccdcc6e450f Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.058356 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ts9nz"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.061346 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.064343 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-wnjpd"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.067858 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.069207 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-2ccf2" podStartSLOduration=120.069197285 podStartE2EDuration="2m0.069197285s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:00.057821629 +0000 UTC m=+139.984903046" watchObservedRunningTime="2025-12-01 20:07:00.069197285 +0000 UTC m=+139.996278702" Dec 01 20:07:00 crc kubenswrapper[4852]: W1201 20:07:00.070276 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8955ebb_0525_480b_8ab4_f2b9d808b69f.slice/crio-dddfe867ea4c1da487490037101ba27e1f3b57bbc97354c21d4b284a4961fa39 WatchSource:0}: Error finding container dddfe867ea4c1da487490037101ba27e1f3b57bbc97354c21d4b284a4961fa39: Status 404 returned error 
can't find the container with id dddfe867ea4c1da487490037101ba27e1f3b57bbc97354c21d4b284a4961fa39 Dec 01 20:07:00 crc kubenswrapper[4852]: W1201 20:07:00.072205 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4211bad_89db_417b_acc1_5097f9b656cf.slice/crio-7f2f9e71455a6b4058db02286de28f9a82711f3ce23962852288de46cfc3b657 WatchSource:0}: Error finding container 7f2f9e71455a6b4058db02286de28f9a82711f3ce23962852288de46cfc3b657: Status 404 returned error can't find the container with id 7f2f9e71455a6b4058db02286de28f9a82711f3ce23962852288de46cfc3b657 Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.098765 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:00 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:00 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:00 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.098828 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.128792 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.129068 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.629037402 +0000 UTC m=+140.556118819 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.129218 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.129649 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.629641731 +0000 UTC m=+140.556723138 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.187348 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.194767 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-68zfl"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.196163 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-mwxfn"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.198032 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.206556 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql"] Dec 01 20:07:00 crc kubenswrapper[4852]: W1201 20:07:00.211551 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e82e35b_0155_4532_867b_b87d8c789575.slice/crio-bbcca836780c85344aff638cd2a7fe53bdb54a5b7c8160e2900d6633fdeec4e1 WatchSource:0}: Error finding container bbcca836780c85344aff638cd2a7fe53bdb54a5b7c8160e2900d6633fdeec4e1: Status 404 returned error can't find the container with id bbcca836780c85344aff638cd2a7fe53bdb54a5b7c8160e2900d6633fdeec4e1 Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.211891 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" event={"ID":"878a6eea-f252-41c4-b674-2c0e4ade05f0","Type":"ContainerStarted","Data":"9b3021a90b7f26d8c7d903eabc431ef3e59d6bb8230319c016b09f742fb9171d"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.211926 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" event={"ID":"878a6eea-f252-41c4-b674-2c0e4ade05f0","Type":"ContainerStarted","Data":"9c0b5ac087a63483829f4abed643bfbee45756e9a3eebbb9d77b7ac171c517a5"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.216050 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7" event={"ID":"c4211bad-89db-417b-acc1-5097f9b656cf","Type":"ContainerStarted","Data":"7f2f9e71455a6b4058db02286de28f9a82711f3ce23962852288de46cfc3b657"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.233113 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.233417 4852 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.73338546 +0000 UTC m=+140.660466877 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.236690 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.237526 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-n7xgz"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.248742 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" event={"ID":"84b94460-3d16-466f-bb73-b65f633d16d7","Type":"ContainerStarted","Data":"9bd21b6a61a1533e7bd0822f05905ecc09c40699149435ddff88ef6d57f0d11b"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.266961 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z" event={"ID":"69d514ec-24fc-4900-a812-fa1ca252b98f","Type":"ContainerStarted","Data":"e89aa5125285931b551a765ea6d21bb5216abe64a47bc53d8aa3586cfa38f624"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.269512 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" event={"ID":"1f6e0af9-aa12-497b-b39d-562dda0b4127","Type":"ContainerStarted","Data":"7deddc5adbe5fe339396d40cbec0b963f4205cb399e86b4a3404993c752a2020"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.273170 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.274667 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xnrtm" podStartSLOduration=121.274645009 podStartE2EDuration="2m1.274645009s" podCreationTimestamp="2025-12-01 20:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:00.260662179 +0000 UTC m=+140.187743606" watchObservedRunningTime="2025-12-01 20:07:00.274645009 +0000 UTC m=+140.201726426" Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.283862 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" event={"ID":"ac2e2b97-df99-4373-b8f4-990f66fdc01b","Type":"ContainerStarted","Data":"ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.283912 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" 
event={"ID":"ac2e2b97-df99-4373-b8f4-990f66fdc01b","Type":"ContainerStarted","Data":"f13f39106cc7bee9f49c7b653fb9c1f9ea50cd1d5d9277fa6f4e7bff6908db4e"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.284846 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.287646 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-wnjpd" event={"ID":"e8955ebb-0525-480b-8ab4-f2b9d808b69f","Type":"ContainerStarted","Data":"dddfe867ea4c1da487490037101ba27e1f3b57bbc97354c21d4b284a4961fa39"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.298541 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.299089 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" podStartSLOduration=120.299067175 podStartE2EDuration="2m0.299067175s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:00.289365482 +0000 UTC m=+140.216446899" watchObservedRunningTime="2025-12-01 20:07:00.299067175 +0000 UTC m=+140.226148592" Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.310883 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-q48jr"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.313393 4852 generic.go:334] "Generic (PLEG): container finished" podID="a55c2bbe-e79b-41f2-9349-8d96592516d1" containerID="d2ce3beeddd037cf6ca4272a11ef5a22a10bf0ee68db81a6d8fd165a5b49c25f" exitCode=0 Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.313507 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" event={"ID":"a55c2bbe-e79b-41f2-9349-8d96592516d1","Type":"ContainerDied","Data":"d2ce3beeddd037cf6ca4272a11ef5a22a10bf0ee68db81a6d8fd165a5b49c25f"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.318479 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" podStartSLOduration=120.318446778 podStartE2EDuration="2m0.318446778s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:00.314624335 +0000 UTC m=+140.241705742" watchObservedRunningTime="2025-12-01 20:07:00.318446778 +0000 UTC m=+140.245528205" Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.353992 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.361071 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.86104979 +0000 UTC m=+140.788131207 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: W1201 20:07:00.389840 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65011ad9_c976_4071_b7eb_81f7e3c4975e.slice/crio-2afb3f19d3a214ec3ddaaf9d1e2bf973c83277927839805214fd1d5cdcff9afc WatchSource:0}: Error finding container 2afb3f19d3a214ec3ddaaf9d1e2bf973c83277927839805214fd1d5cdcff9afc: Status 404 returned error can't find the container with id 2afb3f19d3a214ec3ddaaf9d1e2bf973c83277927839805214fd1d5cdcff9afc Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.417511 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h" event={"ID":"54c5c0b6-21b4-40de-b252-b8ae4b6da45e","Type":"ContainerStarted","Data":"1a0f6b025450bfebd5f805a5c9822c91f01fb82ce68f93429bae5ccdcc6e450f"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.417550 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" event={"ID":"57147412-f20a-4dbd-9337-4839c074baaf","Type":"ContainerStarted","Data":"8f3d171b550523ef5e1e6b9aab816a354b62b6810071ada80a551d2adc60be7e"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.423477 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx" event={"ID":"ffc60626-0001-47d7-a0f7-46450b095e84","Type":"ContainerStarted","Data":"e21ae39558b0f285ede3cdf78fe41094144f2c4b72309324b6a5d242af2cd22b"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.440635 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-nqllf" event={"ID":"79fbfc7e-85e3-4d27-8aec-c0157592c888","Type":"ContainerStarted","Data":"8a14cff430e6eb3fb5defa07250c229f47d3091fbdba3427b0df35f8cfc69885"} Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.441548 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.441678 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-nqllf" Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.444018 4852 patch_prober.go:28] interesting pod/downloads-7954f5f757-nqllf container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.444068 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-nqllf" podUID="79fbfc7e-85e3-4d27-8aec-c0157592c888" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection 
refused" Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.454716 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.455852 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:00.955832271 +0000 UTC m=+140.882913688 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.491446 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.491510 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-t6knk"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.537516 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.546750 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.548056 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.551139 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mbbfj"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.551918 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.557194 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.559017 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.058995351 +0000 UTC m=+140.986076768 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: W1201 20:07:00.626868 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08729539_55f5_4d1c_a952_9af42aa77b9c.slice/crio-d0ac828091f432371d3e582a4c6e56124e81c5f16ab0cfe592bbe6a8a7b680ec WatchSource:0}: Error finding container d0ac828091f432371d3e582a4c6e56124e81c5f16ab0cfe592bbe6a8a7b680ec: Status 404 returned error can't find the container with id d0ac828091f432371d3e582a4c6e56124e81c5f16ab0cfe592bbe6a8a7b680ec Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.649025 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zfljp"] Dec 01 20:07:00 crc kubenswrapper[4852]: W1201 20:07:00.652386 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod020cd112_6df2_4a1b_b6b9_0f01c5255fb1.slice/crio-560619cdcca3fdeb99d9de1068c02b983063d1a8b6c5ab2cc2bb4d428a0dec0d WatchSource:0}: Error finding container 560619cdcca3fdeb99d9de1068c02b983063d1a8b6c5ab2cc2bb4d428a0dec0d: Status 404 returned error can't find the container with id 560619cdcca3fdeb99d9de1068c02b983063d1a8b6c5ab2cc2bb4d428a0dec0d Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.658760 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.658929 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.158901487 +0000 UTC m=+141.085982904 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.659177 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.659488 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.159478185 +0000 UTC m=+141.086559612 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.678246 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.734136 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7rzbn"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.751197 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4k28k"] Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.759807 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.760115 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.260099604 +0000 UTC m=+141.187181021 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.860857 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.861711 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.361697095 +0000 UTC m=+141.288778512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.893847 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.964057 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.964494 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.464464602 +0000 UTC m=+141.391546019 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:00 crc kubenswrapper[4852]: I1201 20:07:00.964771 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:00 crc kubenswrapper[4852]: E1201 20:07:00.965674 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.465659331 +0000 UTC m=+141.392740748 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.038210 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-nqllf" podStartSLOduration=121.038190296 podStartE2EDuration="2m1.038190296s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:01.036748139 +0000 UTC m=+140.963829556" watchObservedRunningTime="2025-12-01 20:07:01.038190296 +0000 UTC m=+140.965271733" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.066086 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.066549 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.566531558 +0000 UTC m=+141.493612975 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.103180 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:01 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:01 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:01 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.103225 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.170129 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.170512 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.670498945 +0000 UTC m=+141.597580362 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.275150 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.275471 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.775442112 +0000 UTC m=+141.702523529 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.376846 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.377264 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.877243769 +0000 UTC m=+141.804325176 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.469768 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7" event={"ID":"c4211bad-89db-417b-acc1-5097f9b656cf","Type":"ContainerStarted","Data":"b5c54e7e63c54a50d1956a5f3c0dfccc12c4af312f7de6398aaaabb170e3c5f1"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.469810 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7" event={"ID":"c4211bad-89db-417b-acc1-5097f9b656cf","Type":"ContainerStarted","Data":"2e181a5d9ee65738243053181dce2509249a21823e3daee358344b66f07805c3"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.472648 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" event={"ID":"80abad70-6989-429d-9a1b-80163a0fdc2b","Type":"ContainerStarted","Data":"6adce985032934db5ab4c549e7c9aefa630e36a0cf5bf3fb289e2eedeceb283f"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.480811 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z" event={"ID":"69d514ec-24fc-4900-a812-fa1ca252b98f","Type":"ContainerStarted","Data":"ef0dff74a01fade3a2ca33bdc744c5864ea25ea55502c8de9ca2cce98e49e283"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.480919 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 
20:07:01.481144 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.981126993 +0000 UTC m=+141.908208410 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.481533 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.481894 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:01.981882107 +0000 UTC m=+141.908963524 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.482631 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" event={"ID":"53099b1c-1857-463d-922b-e1d61ccda3fd","Type":"ContainerStarted","Data":"c868a94635ed2759fedc56d8a2bec511826209e471f5f570b7ffad5a5c193c48"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.482668 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" event={"ID":"53099b1c-1857-463d-922b-e1d61ccda3fd","Type":"ContainerStarted","Data":"bdf6ecee12e0a1c60db78492f812dab029c39365b175bd948e75d5c1f40b02b7"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.483407 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.495151 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h" event={"ID":"54c5c0b6-21b4-40de-b252-b8ae4b6da45e","Type":"ContainerStarted","Data":"1f507e56fa76f2e4d616dcb3786a2848c38a5390d3785998bb7bc3526ebcf252"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.495798 4852 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-k4r8z container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get 
\"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.495912 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" podUID="53099b1c-1857-463d-922b-e1d61ccda3fd" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.502030 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fs2n7" podStartSLOduration=121.502003395 podStartE2EDuration="2m1.502003395s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:01.498873134 +0000 UTC m=+141.425954561" watchObservedRunningTime="2025-12-01 20:07:01.502003395 +0000 UTC m=+141.429084812" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.512002 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" event={"ID":"ff119331-8296-43c6-abd8-2da90ba021a6","Type":"ContainerStarted","Data":"9b031c7691f14bacb7db44b38774722f2b0de48b6c3f15c2f14485756f19f5e4"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.514300 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" event={"ID":"65c26ddd-9a26-4b9c-b3fa-74827d33872a","Type":"ContainerStarted","Data":"c41583862bbb77359b155d266200f6ec2c849492a0000dced631240c80f08f75"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.528368 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" event={"ID":"18202e52-4e9d-49b2-9214-8f9e55684bbb","Type":"ContainerStarted","Data":"88054c4f4aa581d2656ef239c2609a4f7c4a091da98d9311f30c290d93c6f411"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.536252 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" podStartSLOduration=121.536233827 podStartE2EDuration="2m1.536233827s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:01.536008579 +0000 UTC m=+141.463089996" watchObservedRunningTime="2025-12-01 20:07:01.536233827 +0000 UTC m=+141.463315244" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.538836 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx" event={"ID":"ffc60626-0001-47d7-a0f7-46450b095e84","Type":"ContainerStarted","Data":"31a3012cfe1740bec56dbfd5396b61559ec7f18cd15a6edb0923cf981f257e1d"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.551003 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29" event={"ID":"08729539-55f5-4d1c-a952-9af42aa77b9c","Type":"ContainerStarted","Data":"d0ac828091f432371d3e582a4c6e56124e81c5f16ab0cfe592bbe6a8a7b680ec"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.566227 4852 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b5sxx" podStartSLOduration=121.566212811 podStartE2EDuration="2m1.566212811s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:01.564898489 +0000 UTC m=+141.491979916" watchObservedRunningTime="2025-12-01 20:07:01.566212811 +0000 UTC m=+141.493294228" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.578760 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" event={"ID":"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1","Type":"ContainerStarted","Data":"65383b39beb5fb33dcbaefe20340e3eded95bab17f502f6fb2f45ed758561ef1"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.578812 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" event={"ID":"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1","Type":"ContainerStarted","Data":"8957f821b9043dbbd888b6544536328712b2392e591793f82ce50be5a2408553"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.584380 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.584657 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.084634175 +0000 UTC m=+142.011715592 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.585640 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.586346 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.086328539 +0000 UTC m=+142.013409956 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.603427 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" event={"ID":"65011ad9-c976-4071-b7eb-81f7e3c4975e","Type":"ContainerStarted","Data":"f967c70cb8f0497d4fcd10a99b7ed38e8fd68b12c64aa41d76529c775c7310de"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.607539 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" event={"ID":"65011ad9-c976-4071-b7eb-81f7e3c4975e","Type":"ContainerStarted","Data":"2afb3f19d3a214ec3ddaaf9d1e2bf973c83277927839805214fd1d5cdcff9afc"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.681519 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-nqllf" event={"ID":"79fbfc7e-85e3-4d27-8aec-c0157592c888","Type":"ContainerStarted","Data":"07e2fc50bf5bd108ff1ee1ddede80bb3eeb4affcba79343596ac84243405c19f"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.686563 4852 patch_prober.go:28] interesting pod/downloads-7954f5f757-nqllf container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.686620 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-nqllf" podUID="79fbfc7e-85e3-4d27-8aec-c0157592c888" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.692642 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.692959 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.192944311 +0000 UTC m=+142.120025728 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.693041 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.694380 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.194372397 +0000 UTC m=+142.121453814 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.695426 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" event={"ID":"4458fbe8-b293-43d8-8dd3-3a443b22191f","Type":"ContainerStarted","Data":"1b60df58ec8f1ab783c7bfe242a84fc99763e2f667b1ee00f105efe32c670b06"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.710816 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" event={"ID":"7e3d25d9-8048-4adf-9294-e090ab509fdd","Type":"ContainerStarted","Data":"5800634cc8781617539de032b7c2eb1d5d38082a2408445bac04cb61b11a285a"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.758175 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" event={"ID":"7e3d25d9-8048-4adf-9294-e090ab509fdd","Type":"ContainerStarted","Data":"6bd4af69fa99fd380ac390f378f876e898468a0070c5df0a827b7fc495487d16"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.760437 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.771078 4852 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-r2rjr container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" start-of-body= Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.771686 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" podUID="7e3d25d9-8048-4adf-9294-e090ab509fdd" 
containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.797970 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.798649 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.298632972 +0000 UTC m=+142.225714389 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.800815 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" podStartSLOduration=121.800785693 podStartE2EDuration="2m1.800785693s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:01.79512408 +0000 UTC m=+141.722205497" watchObservedRunningTime="2025-12-01 20:07:01.800785693 +0000 UTC m=+141.727867110" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.828192 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" event={"ID":"2440e229-6826-42f6-8e47-7026c3963cb2","Type":"ContainerStarted","Data":"5ce42f79676156c94d5cd695557d2af902d4a222f1351d11462f5c337922f8fb"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.828239 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" event={"ID":"2440e229-6826-42f6-8e47-7026c3963cb2","Type":"ContainerStarted","Data":"64d94fb3289a7dab6011ce4b59c7df4340a1b7ad6a0f178500ded2a95c8da976"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.829263 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.855192 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-mwxfn" event={"ID":"50c50fc4-d5f4-4d1f-96d1-1e510221cc28","Type":"ContainerStarted","Data":"a0f5fe8a1afdb60a165032ca72ca9c3b39a77a1ccb69ba37aab95fa0df07c864"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.855237 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-mwxfn" event={"ID":"50c50fc4-d5f4-4d1f-96d1-1e510221cc28","Type":"ContainerStarted","Data":"2455fd99be3043cabacaebbaad0f0263a1cd075fc11138a6cec69e5ef24cc3c7"} Dec 01 20:07:01 crc kubenswrapper[4852]: 
I1201 20:07:01.865189 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" podStartSLOduration=121.865150494 podStartE2EDuration="2m1.865150494s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:01.864128901 +0000 UTC m=+141.791210338" watchObservedRunningTime="2025-12-01 20:07:01.865150494 +0000 UTC m=+141.792231911" Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.892702 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" event={"ID":"21a47c79-31fd-4082-86f4-8b902af7b09d","Type":"ContainerStarted","Data":"7e115cac649effb64bdac8e30ea86a9de1e5a55c1f2237343a8389c2b3761167"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.899060 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:01 crc kubenswrapper[4852]: E1201 20:07:01.899362 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.399350334 +0000 UTC m=+142.326431751 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.934621 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" event={"ID":"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6","Type":"ContainerStarted","Data":"59699a55488dbc539e758e6cb3fc93b5cfb3d1c18c0a8d92cfacd091722d4b10"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.968612 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" event={"ID":"b7b65e8c-0f7e-441d-9183-2090247908eb","Type":"ContainerStarted","Data":"528c09c7770156adb5e1950a8f8a6ec3c1ef409d48041bee65cf25fb25395f55"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.996362 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" event={"ID":"ee1b682a-9dad-4a65-9f73-9bf26af43729","Type":"ContainerStarted","Data":"d5af5780b8dad799672d7456658a2712887dc19bbd09ed499346e5ff5a04769c"} Dec 01 20:07:01 crc kubenswrapper[4852]: I1201 20:07:01.996431 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" event={"ID":"ee1b682a-9dad-4a65-9f73-9bf26af43729","Type":"ContainerStarted","Data":"4ceba23ed642654f90da413b2a2fdd89b34c952e977619c51b85e0d0635db9e4"} 
Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.001343 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.001862 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.501834434 +0000 UTC m=+142.428915851 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.003003 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" event={"ID":"4b0d0b09-9413-4609-84a5-75665474cde3","Type":"ContainerStarted","Data":"cf52bf9e84228d5f78aa3acdbd9b0be7e664c5aa6f2de5a3804f4e5130c3da52"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.004373 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" podStartSLOduration=123.004335734 podStartE2EDuration="2m3.004335734s" podCreationTimestamp="2025-12-01 20:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:01.994829288 +0000 UTC m=+141.921910705" watchObservedRunningTime="2025-12-01 20:07:02.004335734 +0000 UTC m=+141.931417261" Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.012735 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4" event={"ID":"1d760681-acd7-4c14-9510-74a753796ac9","Type":"ContainerStarted","Data":"474de29afccdbed110617937ca7210e022c4a66e692ffb1ab82f27995b989521"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.037096 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b8cql" podStartSLOduration=122.037079188 podStartE2EDuration="2m2.037079188s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:02.036331453 +0000 UTC m=+141.963412870" watchObservedRunningTime="2025-12-01 20:07:02.037079188 +0000 UTC m=+141.964160605" Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.046734 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-68zfl" event={"ID":"1e82e35b-0155-4532-867b-b87d8c789575","Type":"ContainerStarted","Data":"61a9b54387ffdec0b6a4ea969c94b1e5c0bfd575dd168b2c769c8cb3c4a69693"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 
20:07:02.046780 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-68zfl" event={"ID":"1e82e35b-0155-4532-867b-b87d8c789575","Type":"ContainerStarted","Data":"bbcca836780c85344aff638cd2a7fe53bdb54a5b7c8160e2900d6633fdeec4e1"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.064506 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ldq95" event={"ID":"1f6e0af9-aa12-497b-b39d-562dda0b4127","Type":"ContainerStarted","Data":"02948ed42bec00ee82a30b58f00f3123a003354762db5c8db5b8e63b9c68274d"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.084881 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-68zfl" podStartSLOduration=7.084864196 podStartE2EDuration="7.084864196s" podCreationTimestamp="2025-12-01 20:06:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:02.084141952 +0000 UTC m=+142.011223369" watchObservedRunningTime="2025-12-01 20:07:02.084864196 +0000 UTC m=+142.011945613" Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.086166 4852 generic.go:334] "Generic (PLEG): container finished" podID="57147412-f20a-4dbd-9337-4839c074baaf" containerID="362d6305d1f3c846595596d6cc505405306e9b7930848c384e5c52f277957fac" exitCode=0 Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.086243 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" event={"ID":"57147412-f20a-4dbd-9337-4839c074baaf","Type":"ContainerDied","Data":"362d6305d1f3c846595596d6cc505405306e9b7930848c384e5c52f277957fac"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.103724 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.104195 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.604175377 +0000 UTC m=+142.531256794 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.111364 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" event={"ID":"020cd112-6df2-4a1b-b6b9-0f01c5255fb1","Type":"ContainerStarted","Data":"560619cdcca3fdeb99d9de1068c02b983063d1a8b6c5ab2cc2bb4d428a0dec0d"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.111474 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:02 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:02 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:02 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.111521 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.118607 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" event={"ID":"84b94460-3d16-466f-bb73-b65f633d16d7","Type":"ContainerStarted","Data":"402e59016a48a466c35cbf7e69061a18f1a238e24732c506bd4a83182114a988"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.155077 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-ts9nz" podStartSLOduration=122.155061145 podStartE2EDuration="2m2.155061145s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:02.153196386 +0000 UTC m=+142.080277793" watchObservedRunningTime="2025-12-01 20:07:02.155061145 +0000 UTC m=+142.082142562" Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.185785 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-wnjpd" event={"ID":"e8955ebb-0525-480b-8ab4-f2b9d808b69f","Type":"ContainerStarted","Data":"c89835baf8f15e278804447bc8d21a0f7078044cbf98339231eda92ee73d4531"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.187004 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.204743 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.210887 4852 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.708044711 +0000 UTC m=+142.635126128 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.217898 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q48jr" event={"ID":"8f2fd72c-c9d0-400a-9658-b1a89365a32e","Type":"ContainerStarted","Data":"5db0a80fa456ef5af108120d8387f53f093a062a2f80bfc9511e5d55f94cf0e9"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.227708 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" event={"ID":"6f1d808e-f29f-40db-a8c6-f2af78c6ae2a","Type":"ContainerStarted","Data":"ee3e93cad1a593b962e33bc7ef575a29f9803c77ed07d26c04f7ed98d74fc729"} Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.237703 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-wnjpd" Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.245663 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-wnjpd" podStartSLOduration=122.245638661 podStartE2EDuration="2m2.245638661s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:02.228250162 +0000 UTC m=+142.155331579" watchObservedRunningTime="2025-12-01 20:07:02.245638661 +0000 UTC m=+142.172720078" Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.308935 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.312674 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.812651798 +0000 UTC m=+142.739733215 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.410725 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.411093 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:02.911077117 +0000 UTC m=+142.838158534 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.512527 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.513128 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.0131046 +0000 UTC m=+142.940186017 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.616411 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.617427 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.117396187 +0000 UTC m=+143.044477604 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.718971 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.719279 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.219267316 +0000 UTC m=+143.146348733 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.732640 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbhtp" Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.828000 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.831114 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.331075076 +0000 UTC m=+143.258156493 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.837381 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.839714 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.339695062 +0000 UTC m=+143.266776479 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:02 crc kubenswrapper[4852]: I1201 20:07:02.944938 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:02 crc kubenswrapper[4852]: E1201 20:07:02.945284 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.445269621 +0000 UTC m=+143.372351038 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.049436 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.051215 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.55119331 +0000 UTC m=+143.478274727 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.109738 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:03 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:03 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:03 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.109806 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.156164 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.156843 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.65682058 +0000 UTC m=+143.583901997 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.257742 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" event={"ID":"ff119331-8296-43c6-abd8-2da90ba021a6","Type":"ContainerStarted","Data":"951cb486a4034688f180d2c2c1a28f77b64156bbd2e4851f2af7cd7809737cf1"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.258751 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.260022 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.260270 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.76026005 +0000 UTC m=+143.687341467 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.263834 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" event={"ID":"65011ad9-c976-4071-b7eb-81f7e3c4975e","Type":"ContainerStarted","Data":"57bb49f3be18fda20f75839818f03b7e7199766e97a3b2c2ff39a48c2981e181"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.289760 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-mwxfn" event={"ID":"50c50fc4-d5f4-4d1f-96d1-1e510221cc28","Type":"ContainerStarted","Data":"8274727b698b3da557ce4bf51b932021915b159f852424ad177104829c821e40"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.290399 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-mwxfn" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.310923 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" event={"ID":"80abad70-6989-429d-9a1b-80163a0fdc2b","Type":"ContainerStarted","Data":"f88b9eb5def4d7126d9ea79f409982381993e38fd587ff0024b16b78e831f0ab"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.326774 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" podStartSLOduration=123.32675826 podStartE2EDuration="2m3.32675826s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.326022377 +0000 UTC m=+143.253103784" watchObservedRunningTime="2025-12-01 20:07:03.32675826 +0000 UTC m=+143.253839677" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.327944 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z" event={"ID":"69d514ec-24fc-4900-a812-fa1ca252b98f","Type":"ContainerStarted","Data":"bacee0a822c74f2800d4a5c2bb8e9cbbc2aa8722f252b9c8a4f918972ca53eef"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.340791 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" event={"ID":"18202e52-4e9d-49b2-9214-8f9e55684bbb","Type":"ContainerStarted","Data":"6951fb016f0f1572a953da292efba878d3fa6559a644db7574e47fc016e7539c"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.360732 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" event={"ID":"b7b65e8c-0f7e-441d-9183-2090247908eb","Type":"ContainerStarted","Data":"74faa56205cc80d24ee9cf4e3670a5d3848e0467f1fc9fb0e4df69104019b683"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.360771 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.361132 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.861098475 +0000 UTC m=+143.788179882 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.371209 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h" event={"ID":"54c5c0b6-21b4-40de-b252-b8ae4b6da45e","Type":"ContainerStarted","Data":"4992738ac9f91eae53e3e72f547d58e387d055ba5acb9b27df9620707929c14a"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.380110 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29" event={"ID":"08729539-55f5-4d1c-a952-9af42aa77b9c","Type":"ContainerStarted","Data":"88489769a9b7ed3dcc3b7947bc7e9f17b9ea1f40639ecf10ec211ca7b7fde995"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.412365 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" event={"ID":"206eb8d6-f4cd-4410-b6e4-a0452c21a0f1","Type":"ContainerStarted","Data":"38cb73abb2551c41c2e9ea9c6142a948e98e6fcd3acc2808fbbc1a23370ebf5d"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.418807 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-t6knk" podStartSLOduration=123.418789083 podStartE2EDuration="2m3.418789083s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.389027574 +0000 UTC m=+143.316109011" watchObservedRunningTime="2025-12-01 20:07:03.418789083 +0000 UTC m=+143.345870500" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.439291 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" event={"ID":"020cd112-6df2-4a1b-b6b9-0f01c5255fb1","Type":"ContainerStarted","Data":"f896ec0d1131e57029c4019ce0376f68039b344a6ef260c74afbc15c8c25ce81"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.439349 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" event={"ID":"020cd112-6df2-4a1b-b6b9-0f01c5255fb1","Type":"ContainerStarted","Data":"99642b0a4b02af0486153e28fd92a4d7633fa37f496c4914108feda681938381"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.440949 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" event={"ID":"4b0d0b09-9413-4609-84a5-75665474cde3","Type":"ContainerStarted","Data":"b05bc03d38d04a78a5b8c2da4332461dbf4e5b1ab2ee0756908a7feff6a0c128"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.447534 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" event={"ID":"6f1d808e-f29f-40db-a8c6-f2af78c6ae2a","Type":"ContainerStarted","Data":"1b6bbd4aa8ec6624c859ddbcd71c7817e820ea5db46dae1ba8108ea2cb0747d0"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.455425 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x8r66" podStartSLOduration=123.455402442 podStartE2EDuration="2m3.455402442s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.419608979 +0000 UTC m=+143.346690396" watchObservedRunningTime="2025-12-01 20:07:03.455402442 +0000 UTC m=+143.382483859" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.457916 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-mwxfn" podStartSLOduration=8.457905822 podStartE2EDuration="8.457905822s" podCreationTimestamp="2025-12-01 20:06:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.454853914 +0000 UTC m=+143.381935331" watchObservedRunningTime="2025-12-01 20:07:03.457905822 +0000 UTC m=+143.384987239" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.464377 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.466088 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:03.966073015 +0000 UTC m=+143.893154422 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.484505 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" event={"ID":"21a47c79-31fd-4082-86f4-8b902af7b09d","Type":"ContainerStarted","Data":"215542cb22da421aa8463022f7eb783c59df3ef656c7b219c3a9ed7ffc80e13d"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.484557 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" event={"ID":"21a47c79-31fd-4082-86f4-8b902af7b09d","Type":"ContainerStarted","Data":"822817a3d7bb4b4d48099406183938bbd279d2e8a9069e5b6ed04b8af1b47c0e"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.485037 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.487012 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-t4mgg" podStartSLOduration=123.486984667 podStartE2EDuration="2m3.486984667s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.485306504 +0000 UTC m=+143.412387921" watchObservedRunningTime="2025-12-01 20:07:03.486984667 +0000 UTC m=+143.414066084" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.489667 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" event={"ID":"4458fbe8-b293-43d8-8dd3-3a443b22191f","Type":"ContainerStarted","Data":"9c29a474ac988928df1e0557eeccc852787b0174470cec01ede93712b950806d"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.491234 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" event={"ID":"57147412-f20a-4dbd-9337-4839c074baaf","Type":"ContainerStarted","Data":"f09e0cc697de6f72d805eb65f40b573c3c9d63ae2813b96e89d17cabcacdd0ad"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.492147 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.522805 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8wlj7" podStartSLOduration=123.522791 podStartE2EDuration="2m3.522791s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.521786528 +0000 UTC m=+143.448867945" watchObservedRunningTime="2025-12-01 20:07:03.522791 +0000 UTC m=+143.449872417" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.530704 4852 generic.go:334] "Generic 
(PLEG): container finished" podID="8f2fd72c-c9d0-400a-9658-b1a89365a32e" containerID="0718867049ae5564c436861dec2822bed5a18aa1fadf4749b080b5507ac5a4fa" exitCode=0 Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.530825 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q48jr" event={"ID":"8f2fd72c-c9d0-400a-9658-b1a89365a32e","Type":"ContainerStarted","Data":"3da8a69e91de806374bcf8e6b6cd5dec1435595e4604efc67c2b2c02ac6f141c"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.530857 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q48jr" event={"ID":"8f2fd72c-c9d0-400a-9658-b1a89365a32e","Type":"ContainerDied","Data":"0718867049ae5564c436861dec2822bed5a18aa1fadf4749b080b5507ac5a4fa"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.540982 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" event={"ID":"65c26ddd-9a26-4b9c-b3fa-74827d33872a","Type":"ContainerStarted","Data":"71cc014ede15ec474bb10d125f5e910ca394881227fd16e7962447850c7d443d"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.542118 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.551733 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-v6j29" podStartSLOduration=123.551716281 podStartE2EDuration="2m3.551716281s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.551656599 +0000 UTC m=+143.478738016" watchObservedRunningTime="2025-12-01 20:07:03.551716281 +0000 UTC m=+143.478797698" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.561192 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" event={"ID":"a55c2bbe-e79b-41f2-9349-8d96592516d1","Type":"ContainerStarted","Data":"40f50caa914c8c7ac996868a6b96176cd097d23f1fcdc8f37702b3c94c01e8d6"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.569208 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.569840 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.069789833 +0000 UTC m=+143.996871250 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.570278 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.570833 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.070822376 +0000 UTC m=+143.997903993 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.573644 4852 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-zfljp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.573726 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.588079 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" event={"ID":"20a3acb3-28ed-4ce0-a281-b9006b6e7fe6","Type":"ContainerStarted","Data":"19e894da4f7ef50d7dd08077af05e4a5e4bbf0ff904ba8fcee116cadb7b2906d"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.601510 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7khwn" podStartSLOduration=123.601488993 podStartE2EDuration="2m3.601488993s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.589586361 +0000 UTC m=+143.516667788" watchObservedRunningTime="2025-12-01 20:07:03.601488993 +0000 UTC m=+143.528570410" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.603631 4852 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/community-operators-4vvkx"] Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.604562 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4" event={"ID":"1d760681-acd7-4c14-9510-74a753796ac9","Type":"ContainerStarted","Data":"18cc3ae345dc17cb514f79c13567de7bb41710033a3f72bc58918238fd5a4159"} Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.604665 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.607181 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.641325 4852 patch_prober.go:28] interesting pod/downloads-7954f5f757-nqllf container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.641406 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-nqllf" podUID="79fbfc7e-85e3-4d27-8aec-c0157592c888" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.642232 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k4r8z" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.642279 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-r2rjr" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.649294 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4vvkx"] Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.659995 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-t5h7h" podStartSLOduration=123.659961645 podStartE2EDuration="2m3.659961645s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.640994905 +0000 UTC m=+143.568076322" watchObservedRunningTime="2025-12-01 20:07:03.659961645 +0000 UTC m=+143.587043072" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.676072 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.676314 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-catalog-content\") pod \"community-operators-4vvkx\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.676519 4852 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-utilities\") pod \"community-operators-4vvkx\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.676763 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8tnt\" (UniqueName: \"kubernetes.io/projected/bb095312-fe29-458a-b5d5-8fd607a89e8b-kube-api-access-x8tnt\") pod \"community-operators-4vvkx\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.677292 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.177277823 +0000 UTC m=+144.104359230 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.681237 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hctzd" podStartSLOduration=123.68121584 podStartE2EDuration="2m3.68121584s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.675093853 +0000 UTC m=+143.602175280" watchObservedRunningTime="2025-12-01 20:07:03.68121584 +0000 UTC m=+143.608297257" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.768513 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" podStartSLOduration=123.768475459 podStartE2EDuration="2m3.768475459s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.756526304 +0000 UTC m=+143.683607731" watchObservedRunningTime="2025-12-01 20:07:03.768475459 +0000 UTC m=+143.695556876" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.771147 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5st5z" podStartSLOduration=123.771138784 podStartE2EDuration="2m3.771138784s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.724757011 +0000 UTC m=+143.651838448" watchObservedRunningTime="2025-12-01 20:07:03.771138784 +0000 UTC m=+143.698220201" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.784096 4852 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-x8tnt\" (UniqueName: \"kubernetes.io/projected/bb095312-fe29-458a-b5d5-8fd607a89e8b-kube-api-access-x8tnt\") pod \"community-operators-4vvkx\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.809168 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cf7w4" podStartSLOduration=123.809141817 podStartE2EDuration="2m3.809141817s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.786665164 +0000 UTC m=+143.713746591" watchObservedRunningTime="2025-12-01 20:07:03.809141817 +0000 UTC m=+143.736223234" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.811191 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-catalog-content\") pod \"community-operators-4vvkx\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.811402 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-utilities\") pod \"community-operators-4vvkx\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.811558 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.812336 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-utilities\") pod \"community-operators-4vvkx\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.812993 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-catalog-content\") pod \"community-operators-4vvkx\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.813795 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.313779347 +0000 UTC m=+144.240860764 (durationBeforeRetry 500ms). 
Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.813795 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.313779347 +0000 UTC m=+144.240860764 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.814301 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sxthp"]
Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.815426 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sxthp"
Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.839101 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.843543 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" podStartSLOduration=123.843524894 podStartE2EDuration="2m3.843524894s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.839388781 +0000 UTC m=+143.766470198" watchObservedRunningTime="2025-12-01 20:07:03.843524894 +0000 UTC m=+143.770606311"
Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.844044 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sxthp"]
Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.850897 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8tnt\" (UniqueName: \"kubernetes.io/projected/bb095312-fe29-458a-b5d5-8fd607a89e8b-kube-api-access-x8tnt\") pod \"community-operators-4vvkx\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " pod="openshift-marketplace/community-operators-4vvkx"
Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.918082 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.918267 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-catalog-content\") pod \"certified-operators-sxthp\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " pod="openshift-marketplace/certified-operators-sxthp"
Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.918292 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-utilities\") pod \"certified-operators-sxthp\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " pod="openshift-marketplace/certified-operators-sxthp"
Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.918343 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for
volume \"kube-api-access-8vztx\" (UniqueName: \"kubernetes.io/projected/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-kube-api-access-8vztx\") pod \"certified-operators-sxthp\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:03 crc kubenswrapper[4852]: E1201 20:07:03.918464 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.418431625 +0000 UTC m=+144.345513042 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.919891 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" podStartSLOduration=123.919874282 podStartE2EDuration="2m3.919874282s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.913746284 +0000 UTC m=+143.840827701" watchObservedRunningTime="2025-12-01 20:07:03.919874282 +0000 UTC m=+143.846955699" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.930787 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.960622 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.985545 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" podStartSLOduration=123.985514264 podStartE2EDuration="2m3.985514264s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:03.978010053 +0000 UTC m=+143.905091470" watchObservedRunningTime="2025-12-01 20:07:03.985514264 +0000 UTC m=+143.912595681" Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.986532 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sr6w6"] Dec 01 20:07:03 crc kubenswrapper[4852]: I1201 20:07:03.987792 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.005113 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sr6w6"] Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.019155 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9" podStartSLOduration=124.019137766 podStartE2EDuration="2m4.019137766s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:04.017883977 +0000 UTC m=+143.944965394" watchObservedRunningTime="2025-12-01 20:07:04.019137766 +0000 UTC m=+143.946219183" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.019865 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-catalog-content\") pod \"certified-operators-sxthp\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.019910 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-utilities\") pod \"certified-operators-sxthp\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.019971 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vztx\" (UniqueName: \"kubernetes.io/projected/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-kube-api-access-8vztx\") pod \"certified-operators-sxthp\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.020019 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:04 crc kubenswrapper[4852]: E1201 20:07:04.020367 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.520351056 +0000 UTC m=+144.447432473 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.020622 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-utilities\") pod \"certified-operators-sxthp\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.020992 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-catalog-content\") pod \"certified-operators-sxthp\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.047491 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-4k28k" podStartSLOduration=124.047470409 podStartE2EDuration="2m4.047470409s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:04.046018492 +0000 UTC m=+143.973099899" watchObservedRunningTime="2025-12-01 20:07:04.047470409 +0000 UTC m=+143.974551816" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.048079 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vztx\" (UniqueName: \"kubernetes.io/projected/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-kube-api-access-8vztx\") pod \"certified-operators-sxthp\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.105488 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:04 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:04 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:04 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.106235 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.120968 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.121206 4852 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-utilities\") pod \"community-operators-sr6w6\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.121257 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwxm4\" (UniqueName: \"kubernetes.io/projected/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-kube-api-access-kwxm4\") pod \"community-operators-sr6w6\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.121321 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-catalog-content\") pod \"community-operators-sr6w6\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: E1201 20:07:04.121536 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.621518493 +0000 UTC m=+144.548599920 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.186907 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9pz94"] Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.187835 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.194874 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9pz94"] Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.208110 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.222146 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-utilities\") pod \"community-operators-sr6w6\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.222195 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwxm4\" (UniqueName: \"kubernetes.io/projected/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-kube-api-access-kwxm4\") pod \"community-operators-sr6w6\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.222241 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-catalog-content\") pod \"community-operators-sr6w6\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.222264 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:04 crc kubenswrapper[4852]: E1201 20:07:04.222603 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.722590255 +0000 UTC m=+144.649671662 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.223079 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-utilities\") pod \"community-operators-sr6w6\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.224606 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-catalog-content\") pod \"community-operators-sr6w6\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.264868 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwxm4\" (UniqueName: \"kubernetes.io/projected/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-kube-api-access-kwxm4\") pod \"community-operators-sr6w6\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.306948 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.335140 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.335468 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-utilities\") pod \"certified-operators-9pz94\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.335488 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrnm9\" (UniqueName: \"kubernetes.io/projected/c30293cc-0c33-4001-9e98-82d881fb74d8-kube-api-access-wrnm9\") pod \"certified-operators-9pz94\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.335521 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-catalog-content\") pod \"certified-operators-9pz94\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:04 crc kubenswrapper[4852]: E1201 20:07:04.335639 4852 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.835623894 +0000 UTC m=+144.762705301 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.437862 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.437902 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-utilities\") pod \"certified-operators-9pz94\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.437920 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrnm9\" (UniqueName: \"kubernetes.io/projected/c30293cc-0c33-4001-9e98-82d881fb74d8-kube-api-access-wrnm9\") pod \"certified-operators-9pz94\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.437946 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-catalog-content\") pod \"certified-operators-9pz94\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.438855 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-catalog-content\") pod \"certified-operators-9pz94\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.438921 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-utilities\") pod \"certified-operators-9pz94\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:04 crc kubenswrapper[4852]: E1201 20:07:04.439180 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:04.939167146 +0000 UTC m=+144.866248563 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.471984 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrnm9\" (UniqueName: \"kubernetes.io/projected/c30293cc-0c33-4001-9e98-82d881fb74d8-kube-api-access-wrnm9\") pod \"certified-operators-9pz94\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " pod="openshift-marketplace/certified-operators-9pz94"
Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.512836 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9pz94"
Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.539206 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
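The PLEG entries just below supply the missing piece: hostpath-provisioner/csi-hostpathplugin-7rzbn is itself a pod on this node, and its containers are only starting now, which is why nothing had yet registered kubevirt.io.hostpath-provisioner with the kubelet. The marketplace-operator readiness failure below is the same startup-ordering class of noise: the kubelet probes http://10.217.0.29:8080/healthz before the operator's server is listening and gets connection refused. An HTTP probe of this kind boils down to a GET with a short timeout where any transport error, or a status outside the 200-399 range, counts as a failure; a rough sketch (not the kubelet's actual prober.go, and the URL is just the one from the log):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeHTTP approximates an HTTP readiness check: any transport error
// (e.g. "connect: connection refused" while the server is still
// starting) or a status outside 200-399 is reported as a failure.
func probeHTTP(url string) error {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. dial tcp ...: connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("HTTP probe failed with statuscode: %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeHTTP("http://10.217.0.29:8080/healthz"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}

Once the container's server binds the port, the same probe flips to success, which is what the later SyncLoop (probe) "ready" entries record.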
Dec 01 20:07:04 crc kubenswrapper[4852]: E1201 20:07:04.539701 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:05.039681452 +0000 UTC m=+144.966762869 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.617930 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q48jr" event={"ID":"8f2fd72c-c9d0-400a-9658-b1a89365a32e","Type":"ContainerStarted","Data":"11b4483bec778b30bf6d41ea081667eaa5d436b48ec3150c29dd0eacee3ef836"}
Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.620501 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mbbfj" event={"ID":"6f1d808e-f29f-40db-a8c6-f2af78c6ae2a","Type":"ContainerStarted","Data":"c500ccbf67cf3f2a11bdd216dac6b504ed07549c22f211583cbd489e5323923e"}
Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.622535 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" event={"ID":"4458fbe8-b293-43d8-8dd3-3a443b22191f","Type":"ContainerStarted","Data":"9f7193592fdc795dd155aaeb5cc4f35c12c73320e65561ce8f013b066439a391"}
Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.653075 4852 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-zfljp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body=
Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.653133 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused"
Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.654358 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql"
Dec 01 20:07:04 crc kubenswrapper[4852]: E1201 20:07:04.654706 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:05.154696654 +0000 UTC m=+145.081778071 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.735711 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-q48jr" podStartSLOduration=124.735692171 podStartE2EDuration="2m4.735692171s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:04.692576454 +0000 UTC m=+144.619657871" watchObservedRunningTime="2025-12-01 20:07:04.735692171 +0000 UTC m=+144.662773588" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.761027 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:04 crc kubenswrapper[4852]: E1201 20:07:04.763524 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:05.263489776 +0000 UTC m=+145.190571193 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.865420 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:04 crc kubenswrapper[4852]: E1201 20:07:04.866315 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:05.366301676 +0000 UTC m=+145.293383093 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.913359 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4vvkx"] Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.920607 4852 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.967590 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:04 crc kubenswrapper[4852]: E1201 20:07:04.967871 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:05.467855464 +0000 UTC m=+145.394936881 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:04 crc kubenswrapper[4852]: I1201 20:07:04.977870 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sxthp"] Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.071181 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:05 crc kubenswrapper[4852]: E1201 20:07:05.071564 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:05.571552952 +0000 UTC m=+145.498634369 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.117911 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:05 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:05 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:05 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.118000 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.140976 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sr6w6"] Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.175777 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:05 crc kubenswrapper[4852]: E1201 20:07:05.176095 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:05.676079747 +0000 UTC m=+145.603161164 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.266380 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9pz94"] Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.277503 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:05 crc kubenswrapper[4852]: E1201 20:07:05.277778 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:05.77776708 +0000 UTC m=+145.704848497 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 20:07:05 crc kubenswrapper[4852]: W1201 20:07:05.333925 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc30293cc_0c33_4001_9e98_82d881fb74d8.slice/crio-3bd3a101db5d908955ed2a71bc8ae57d0cbf03b87d94735042e16c963f1deddb WatchSource:0}: Error finding container 3bd3a101db5d908955ed2a71bc8ae57d0cbf03b87d94735042e16c963f1deddb: Status 404 returned error can't find the container with id 3bd3a101db5d908955ed2a71bc8ae57d0cbf03b87d94735042e16c963f1deddb Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.378609 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 20:07:05 crc kubenswrapper[4852]: E1201 20:07:05.378931 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 20:07:05.878916715 +0000 UTC m=+145.805998132 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.480239 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql"
Dec 01 20:07:05 crc kubenswrapper[4852]: E1201 20:07:05.480905 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 20:07:05.980873127 +0000 UTC m=+145.907954544 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ld6ql" (UID: "551cdd05-d373-4936-b295-281f59449cde") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.523797 4852 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-01T20:07:04.920628324Z","Handler":null,"Name":""}
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.535061 4852 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.535120 4852 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.567625 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f5255"]
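This is the turning point for the stuck volume: the RegisterPlugin and csi_plugin.go entries show the kubelet validating the plugin over the registration socket it noticed at 20:07:04.920 (/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock) and registering the driver at /var/lib/kubelet/plugins/csi-hostpath/csi.sock. Immediately below, the TearDown that had been failing since 20:07:03 finally succeeds. A sketch of the discovery half, assuming only that registration sockets appear as <driver>-reg.sock under plugins_registry (the kubelet's plugin watcher is fsnotify-driven and more involved):

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"time"
)

// waitForRegistrationSocket polls the kubelet's plugin registry
// directory until a driver's *-reg.sock shows up, roughly the moment
// plugin_watcher.go logs "Adding socket path or updating timestamp to
// desired state cache".
func waitForRegistrationSocket(dir, driver string) (string, error) {
	sock := filepath.Join(dir, driver+"-reg.sock")
	for i := 0; i < 60; i++ {
		if _, err := os.Stat(sock); err == nil {
			return sock, nil
		}
		time.Sleep(500 * time.Millisecond)
	}
	return "", fmt.Errorf("%s never registered", driver)
}

func main() {
	sock, err := waitForRegistrationSocket(
		"/var/lib/kubelet/plugins_registry",
		"kubevirt.io.hostpath-provisioner",
	)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("found registration socket:", sock)
}

In the real flow, the kubelet then dials the socket, fetches the driver's name and endpoint over the registration gRPC API, and only after that do CSI mount and unmount calls stop failing.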
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.568769 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f5255"
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.572609 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.582620 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.586296 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.587380 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f5255"]
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.631063 4852 generic.go:334] "Generic (PLEG): container finished" podID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerID="2ad9a2ff2103f27ace8ccbbc55f076bded99b65f5330ba746b2d2e17e1a3962a" exitCode=0
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.631160 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4vvkx" event={"ID":"bb095312-fe29-458a-b5d5-8fd607a89e8b","Type":"ContainerDied","Data":"2ad9a2ff2103f27ace8ccbbc55f076bded99b65f5330ba746b2d2e17e1a3962a"}
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.631203 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4vvkx" event={"ID":"bb095312-fe29-458a-b5d5-8fd607a89e8b","Type":"ContainerStarted","Data":"9f8f488bfdf1d755934c3cda0b248738731f1fc497a154ec76a5bdcc69fbe838"}
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.634571 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.634827 4852 generic.go:334] "Generic (PLEG): container finished" podID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerID="8e1cc78c549adb6205db3f36f9de3850f886a8d6482c68abfb05d59c7abf802c" exitCode=0
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.634899 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxthp" event={"ID":"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e","Type":"ContainerDied","Data":"8e1cc78c549adb6205db3f36f9de3850f886a8d6482c68abfb05d59c7abf802c"}
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.634919 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxthp" event={"ID":"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e","Type":"ContainerStarted","Data":"65af9180016367e74ea0b0f617241d903076b64a16467a74733d1255cd091a58"}
Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.639650 4852 generic.go:334] "Generic (PLEG): container finished" podID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerID="7f3ca154688157d27613973d270f5ce9d2f396ed397d6df58f9d764a9ffb673b"
exitCode=0 Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.639796 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9pz94" event={"ID":"c30293cc-0c33-4001-9e98-82d881fb74d8","Type":"ContainerDied","Data":"7f3ca154688157d27613973d270f5ce9d2f396ed397d6df58f9d764a9ffb673b"} Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.639892 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9pz94" event={"ID":"c30293cc-0c33-4001-9e98-82d881fb74d8","Type":"ContainerStarted","Data":"3bd3a101db5d908955ed2a71bc8ae57d0cbf03b87d94735042e16c963f1deddb"} Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.642292 4852 generic.go:334] "Generic (PLEG): container finished" podID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerID="b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f" exitCode=0 Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.642551 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sr6w6" event={"ID":"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8","Type":"ContainerDied","Data":"b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f"} Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.643304 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sr6w6" event={"ID":"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8","Type":"ContainerStarted","Data":"8e2c5db96a1f593aeafb5d71e81a4991877bcf949bbf2254d9d11caf7da65d79"} Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.652697 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" event={"ID":"4458fbe8-b293-43d8-8dd3-3a443b22191f","Type":"ContainerStarted","Data":"21f2f008c5f44be998b12fec0feb2b95fbdeed03088d7fae54319d725af58ada"} Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.652747 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" event={"ID":"4458fbe8-b293-43d8-8dd3-3a443b22191f","Type":"ContainerStarted","Data":"cd84808cc1f7c4701d46d4cc69e8e17434bbc2ef56d5aa95f11c71d9972f10e9"} Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.665324 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zxqr5" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.689039 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-utilities\") pod \"redhat-marketplace-f5255\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.689128 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-catalog-content\") pod \"redhat-marketplace-f5255\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.689223 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.689249 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glmlr\" (UniqueName: \"kubernetes.io/projected/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-kube-api-access-glmlr\") pod \"redhat-marketplace-f5255\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.696654 4852 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.696705 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.734867 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-7rzbn" podStartSLOduration=10.734842102 podStartE2EDuration="10.734842102s" podCreationTimestamp="2025-12-01 20:06:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:05.721485622 +0000 UTC m=+145.648567049" watchObservedRunningTime="2025-12-01 20:07:05.734842102 +0000 UTC m=+145.661923519" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.757340 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ld6ql\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.779188 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.790162 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-utilities\") pod \"redhat-marketplace-f5255\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.790304 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-catalog-content\") pod \"redhat-marketplace-f5255\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.790528 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glmlr\" (UniqueName: \"kubernetes.io/projected/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-kube-api-access-glmlr\") pod \"redhat-marketplace-f5255\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.797378 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-catalog-content\") pod \"redhat-marketplace-f5255\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.800314 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-utilities\") pod \"redhat-marketplace-f5255\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.824558 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glmlr\" (UniqueName: \"kubernetes.io/projected/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-kube-api-access-glmlr\") pod \"redhat-marketplace-f5255\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.890381 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.967069 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sl8rl"] Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.968540 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.980171 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sl8rl"] Dec 01 20:07:05 crc kubenswrapper[4852]: I1201 20:07:05.993619 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ld6ql"] Dec 01 20:07:06 crc kubenswrapper[4852]: W1201 20:07:06.003128 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod551cdd05_d373_4936_b295_281f59449cde.slice/crio-2cbabc3034c242d9725d791ddafcaf8b2ca849de5b1fb6f7cbbec35977fc511d WatchSource:0}: Error finding container 2cbabc3034c242d9725d791ddafcaf8b2ca849de5b1fb6f7cbbec35977fc511d: Status 404 returned error can't find the container with id 2cbabc3034c242d9725d791ddafcaf8b2ca849de5b1fb6f7cbbec35977fc511d Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.093625 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-catalog-content\") pod \"redhat-marketplace-sl8rl\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.093743 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpwzh\" (UniqueName: \"kubernetes.io/projected/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-kube-api-access-cpwzh\") pod \"redhat-marketplace-sl8rl\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.093777 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-utilities\") pod \"redhat-marketplace-sl8rl\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.097385 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:06 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:06 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:06 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.097922 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.114752 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f5255"] Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.194662 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpwzh\" (UniqueName: \"kubernetes.io/projected/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-kube-api-access-cpwzh\") pod \"redhat-marketplace-sl8rl\" (UID: 
\"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.194718 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-utilities\") pod \"redhat-marketplace-sl8rl\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.194743 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-catalog-content\") pod \"redhat-marketplace-sl8rl\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.195158 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-catalog-content\") pod \"redhat-marketplace-sl8rl\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.195637 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-utilities\") pod \"redhat-marketplace-sl8rl\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.217132 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpwzh\" (UniqueName: \"kubernetes.io/projected/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-kube-api-access-cpwzh\") pod \"redhat-marketplace-sl8rl\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.292144 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.339816 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.535235 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sl8rl"] Dec 01 20:07:06 crc kubenswrapper[4852]: W1201 20:07:06.543672 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf60b7d08_82e1_4c52_a49b_81ef0fd12bff.slice/crio-0046c8693d2a339320581d9fed8e7a110da163495ef2e928520368f64a3be99b WatchSource:0}: Error finding container 0046c8693d2a339320581d9fed8e7a110da163495ef2e928520368f64a3be99b: Status 404 returned error can't find the container with id 0046c8693d2a339320581d9fed8e7a110da163495ef2e928520368f64a3be99b Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.666748 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" event={"ID":"551cdd05-d373-4936-b295-281f59449cde","Type":"ContainerStarted","Data":"b0fb560d86a283e2a72f59a324a0d146af9b736cb3b44a1435e84df388361d85"} Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.667164 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" event={"ID":"551cdd05-d373-4936-b295-281f59449cde","Type":"ContainerStarted","Data":"2cbabc3034c242d9725d791ddafcaf8b2ca849de5b1fb6f7cbbec35977fc511d"} Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.667238 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.672628 4852 generic.go:334] "Generic (PLEG): container finished" podID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerID="8400a975fd7fc38e06882d1d94603a2c7e078c44a7af7573ae36e052b08eb3e0" exitCode=0 Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.673111 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5255" event={"ID":"f9e7ffee-9028-45cb-83bd-3d5a0019ab16","Type":"ContainerDied","Data":"8400a975fd7fc38e06882d1d94603a2c7e078c44a7af7573ae36e052b08eb3e0"} Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.673159 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5255" event={"ID":"f9e7ffee-9028-45cb-83bd-3d5a0019ab16","Type":"ContainerStarted","Data":"f29b32314b5b527d8de893f709f403cf7d01c17c398baaef117491aadc1a0df6"} Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.675823 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sl8rl" event={"ID":"f60b7d08-82e1-4c52-a49b-81ef0fd12bff","Type":"ContainerStarted","Data":"0046c8693d2a339320581d9fed8e7a110da163495ef2e928520368f64a3be99b"} Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.690483 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" podStartSLOduration=126.69042035 podStartE2EDuration="2m6.69042035s" podCreationTimestamp="2025-12-01 20:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-01 20:07:06.686551375 +0000 UTC m=+146.613632812" watchObservedRunningTime="2025-12-01 20:07:06.69042035 +0000 UTC m=+146.617501767" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.692388 4852 generic.go:334] "Generic (PLEG): container finished" podID="b7b65e8c-0f7e-441d-9183-2090247908eb" containerID="74faa56205cc80d24ee9cf4e3670a5d3848e0467f1fc9fb0e4df69104019b683" exitCode=0 Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.692524 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" event={"ID":"b7b65e8c-0f7e-441d-9183-2090247908eb","Type":"ContainerDied","Data":"74faa56205cc80d24ee9cf4e3670a5d3848e0467f1fc9fb0e4df69104019b683"} Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.773225 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dwvx7"] Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.776656 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.788837 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.794224 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dwvx7"] Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.918362 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-utilities\") pod \"redhat-operators-dwvx7\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.918493 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-catalog-content\") pod \"redhat-operators-dwvx7\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.918549 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqzn7\" (UniqueName: \"kubernetes.io/projected/10a3a8e7-980e-4015-9418-f0854d431b85-kube-api-access-cqzn7\") pod \"redhat-operators-dwvx7\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.961841 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.963037 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.968836 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.976477 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 01 20:07:06 crc kubenswrapper[4852]: I1201 20:07:06.976861 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.020274 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqzn7\" (UniqueName: \"kubernetes.io/projected/10a3a8e7-980e-4015-9418-f0854d431b85-kube-api-access-cqzn7\") pod \"redhat-operators-dwvx7\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.020415 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-utilities\") pod \"redhat-operators-dwvx7\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.020481 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-catalog-content\") pod \"redhat-operators-dwvx7\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.020893 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-catalog-content\") pod \"redhat-operators-dwvx7\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.021428 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-utilities\") pod \"redhat-operators-dwvx7\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.045291 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqzn7\" (UniqueName: \"kubernetes.io/projected/10a3a8e7-980e-4015-9418-f0854d431b85-kube-api-access-cqzn7\") pod \"redhat-operators-dwvx7\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.098543 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:07 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:07 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:07 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.098617 
4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.112064 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.121904 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c18714c-2f46-4e5b-86e2-d25ed0574340-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c18714c-2f46-4e5b-86e2-d25ed0574340\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.122016 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c18714c-2f46-4e5b-86e2-d25ed0574340-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c18714c-2f46-4e5b-86e2-d25ed0574340\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.195994 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tqk9c"] Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.197814 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.204440 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tqk9c"] Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.224336 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c18714c-2f46-4e5b-86e2-d25ed0574340-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c18714c-2f46-4e5b-86e2-d25ed0574340\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.225195 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c18714c-2f46-4e5b-86e2-d25ed0574340-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c18714c-2f46-4e5b-86e2-d25ed0574340\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.225398 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c18714c-2f46-4e5b-86e2-d25ed0574340-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c18714c-2f46-4e5b-86e2-d25ed0574340\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.257466 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c18714c-2f46-4e5b-86e2-d25ed0574340-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c18714c-2f46-4e5b-86e2-d25ed0574340\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.301690 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.326733 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-catalog-content\") pod \"redhat-operators-tqk9c\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.326825 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.326848 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj7lm\" (UniqueName: \"kubernetes.io/projected/7bed1d58-1604-4e6f-a217-244cdc5c77e1-kube-api-access-dj7lm\") pod \"redhat-operators-tqk9c\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.326879 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-utilities\") pod \"redhat-operators-tqk9c\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.326915 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.331521 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.345731 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.430971 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-utilities\") pod \"redhat-operators-tqk9c\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.431603 4852 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.431662 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.431681 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-catalog-content\") pod \"redhat-operators-tqk9c\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.431720 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj7lm\" (UniqueName: \"kubernetes.io/projected/7bed1d58-1604-4e6f-a217-244cdc5c77e1-kube-api-access-dj7lm\") pod \"redhat-operators-tqk9c\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.432796 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-utilities\") pod \"redhat-operators-tqk9c\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.433940 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-catalog-content\") pod \"redhat-operators-tqk9c\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.437863 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.437893 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.445171 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dwvx7"] Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.453764 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj7lm\" (UniqueName: 
\"kubernetes.io/projected/7bed1d58-1604-4e6f-a217-244cdc5c77e1-kube-api-access-dj7lm\") pod \"redhat-operators-tqk9c\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: W1201 20:07:07.482582 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10a3a8e7_980e_4015_9418_f0854d431b85.slice/crio-ebb2b64352783fb7263744185a790ef65a6530bf758493197e8ec831180c1039 WatchSource:0}: Error finding container ebb2b64352783fb7263744185a790ef65a6530bf758493197e8ec831180c1039: Status 404 returned error can't find the container with id ebb2b64352783fb7263744185a790ef65a6530bf758493197e8ec831180c1039 Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.513586 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.546690 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.557091 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.633892 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.636663 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a-metrics-certs\") pod \"network-metrics-daemon-j2q4c\" (UID: \"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a\") " pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.636731 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.643775 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.643991 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2q4c" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.644172 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-glnwk" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.654933 4852 patch_prober.go:28] interesting pod/console-f9d7485db-glnwk container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.655043 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-glnwk" podUID="ab102fcf-71d9-40fc-9b9d-79b697e7864c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.731882 4852 generic.go:334] "Generic (PLEG): container finished" podID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerID="a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005" exitCode=0 Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.731946 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sl8rl" event={"ID":"f60b7d08-82e1-4c52-a49b-81ef0fd12bff","Type":"ContainerDied","Data":"a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005"} Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.741741 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dwvx7" event={"ID":"10a3a8e7-980e-4015-9418-f0854d431b85","Type":"ContainerStarted","Data":"ebb2b64352783fb7263744185a790ef65a6530bf758493197e8ec831180c1039"} Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.752612 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.768638 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.769290 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.774933 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:07:07 crc kubenswrapper[4852]: W1201 20:07:07.835411 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod1c18714c_2f46_4e5b_86e2_d25ed0574340.slice/crio-f4e78ee0c545182773bdc84b43d5266c9970eaa51754885a42a87cadb6ed1b1d WatchSource:0}: Error finding container f4e78ee0c545182773bdc84b43d5266c9970eaa51754885a42a87cadb6ed1b1d: Status 404 returned error can't find the container with id f4e78ee0c545182773bdc84b43d5266c9970eaa51754885a42a87cadb6ed1b1d Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.919108 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.919151 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:07:07 crc kubenswrapper[4852]: I1201 20:07:07.945157 
4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.019520 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-nqllf" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.094181 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-pvvl9" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.100772 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:08 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:08 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:08 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.100835 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.181301 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.243025 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bnzt7\" (UniqueName: \"kubernetes.io/projected/b7b65e8c-0f7e-441d-9183-2090247908eb-kube-api-access-bnzt7\") pod \"b7b65e8c-0f7e-441d-9183-2090247908eb\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.243160 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7b65e8c-0f7e-441d-9183-2090247908eb-secret-volume\") pod \"b7b65e8c-0f7e-441d-9183-2090247908eb\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.243205 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7b65e8c-0f7e-441d-9183-2090247908eb-config-volume\") pod \"b7b65e8c-0f7e-441d-9183-2090247908eb\" (UID: \"b7b65e8c-0f7e-441d-9183-2090247908eb\") " Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.245558 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7b65e8c-0f7e-441d-9183-2090247908eb-config-volume" (OuterVolumeSpecName: "config-volume") pod "b7b65e8c-0f7e-441d-9183-2090247908eb" (UID: "b7b65e8c-0f7e-441d-9183-2090247908eb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.256515 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7b65e8c-0f7e-441d-9183-2090247908eb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b7b65e8c-0f7e-441d-9183-2090247908eb" (UID: "b7b65e8c-0f7e-441d-9183-2090247908eb"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.256638 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7b65e8c-0f7e-441d-9183-2090247908eb-kube-api-access-bnzt7" (OuterVolumeSpecName: "kube-api-access-bnzt7") pod "b7b65e8c-0f7e-441d-9183-2090247908eb" (UID: "b7b65e8c-0f7e-441d-9183-2090247908eb"). InnerVolumeSpecName "kube-api-access-bnzt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.344427 4852 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7b65e8c-0f7e-441d-9183-2090247908eb-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.344475 4852 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7b65e8c-0f7e-441d-9183-2090247908eb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.344488 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bnzt7\" (UniqueName: \"kubernetes.io/projected/b7b65e8c-0f7e-441d-9183-2090247908eb-kube-api-access-bnzt7\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.403119 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tqk9c"] Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.405889 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-j2q4c"] Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.418744 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:07:08 crc kubenswrapper[4852]: W1201 20:07:08.421364 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ae4c9c3_3bfa_4958_8365_0b1b4a83b98a.slice/crio-891e0d67600a4956acd05c9b9abc942c57df5ecf76045a938819745c030d8ea3 WatchSource:0}: Error finding container 891e0d67600a4956acd05c9b9abc942c57df5ecf76045a938819745c030d8ea3: Status 404 returned error can't find the container with id 891e0d67600a4956acd05c9b9abc942c57df5ecf76045a938819745c030d8ea3 Dec 01 20:07:08 crc kubenswrapper[4852]: W1201 20:07:08.422592 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-00a4294b95fc217c014bbb7c9e1ad705e97998cf0a09cafec8463c56618fd7ba WatchSource:0}: Error finding container 00a4294b95fc217c014bbb7c9e1ad705e97998cf0a09cafec8463c56618fd7ba: Status 404 returned error can't find the container with id 00a4294b95fc217c014bbb7c9e1ad705e97998cf0a09cafec8463c56618fd7ba Dec 01 20:07:08 crc kubenswrapper[4852]: W1201 20:07:08.443480 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7bed1d58_1604_4e6f_a217_244cdc5c77e1.slice/crio-bb698140e4370234679ce2459e42f120ac53b2dae3b0345253e29487cf078053 WatchSource:0}: Error finding container bb698140e4370234679ce2459e42f120ac53b2dae3b0345253e29487cf078053: Status 404 returned error can't find the container with id bb698140e4370234679ce2459e42f120ac53b2dae3b0345253e29487cf078053 Dec 01 20:07:08 crc kubenswrapper[4852]: W1201 20:07:08.574267 4852 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-bc19465e21fabf56d541c9fafb04908d66fdfe0d832b9a617ac355a0ef7ba9bd WatchSource:0}: Error finding container bc19465e21fabf56d541c9fafb04908d66fdfe0d832b9a617ac355a0ef7ba9bd: Status 404 returned error can't find the container with id bc19465e21fabf56d541c9fafb04908d66fdfe0d832b9a617ac355a0ef7ba9bd Dec 01 20:07:08 crc kubenswrapper[4852]: W1201 20:07:08.576356 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-1b57430a058cb12358c4805bc8586a1dcf61574326751dda433736386a9002d3 WatchSource:0}: Error finding container 1b57430a058cb12358c4805bc8586a1dcf61574326751dda433736386a9002d3: Status 404 returned error can't find the container with id 1b57430a058cb12358c4805bc8586a1dcf61574326751dda433736386a9002d3 Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.793052 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"bc19465e21fabf56d541c9fafb04908d66fdfe0d832b9a617ac355a0ef7ba9bd"} Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.796829 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"1b57430a058cb12358c4805bc8586a1dcf61574326751dda433736386a9002d3"} Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.800393 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" event={"ID":"b7b65e8c-0f7e-441d-9183-2090247908eb","Type":"ContainerDied","Data":"528c09c7770156adb5e1950a8f8a6ec3c1ef409d48041bee65cf25fb25395f55"} Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.800489 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.800447 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="528c09c7770156adb5e1950a8f8a6ec3c1ef409d48041bee65cf25fb25395f55" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.805568 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"00a4294b95fc217c014bbb7c9e1ad705e97998cf0a09cafec8463c56618fd7ba"} Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.813178 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c18714c-2f46-4e5b-86e2-d25ed0574340","Type":"ContainerStarted","Data":"ed24f9b6d257c2f0b0ab2b60c8eb54c7bff663b367e720112ca40734267d55cb"} Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.813245 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c18714c-2f46-4e5b-86e2-d25ed0574340","Type":"ContainerStarted","Data":"f4e78ee0c545182773bdc84b43d5266c9970eaa51754885a42a87cadb6ed1b1d"} Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.818806 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" event={"ID":"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a","Type":"ContainerStarted","Data":"891e0d67600a4956acd05c9b9abc942c57df5ecf76045a938819745c030d8ea3"} Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.831931 4852 generic.go:334] "Generic (PLEG): container finished" podID="10a3a8e7-980e-4015-9418-f0854d431b85" containerID="a2841a2ed43bf7ba08988da1cd541ad8da84c85e890c0b169c3198465ba300c4" exitCode=0 Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.832334 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dwvx7" event={"ID":"10a3a8e7-980e-4015-9418-f0854d431b85","Type":"ContainerDied","Data":"a2841a2ed43bf7ba08988da1cd541ad8da84c85e890c0b169c3198465ba300c4"} Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.839904 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk9c" event={"ID":"7bed1d58-1604-4e6f-a217-244cdc5c77e1","Type":"ContainerStarted","Data":"bb698140e4370234679ce2459e42f120ac53b2dae3b0345253e29487cf078053"} Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.845567 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-q48jr" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.848734 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-9hnnb" Dec 01 20:07:08 crc kubenswrapper[4852]: I1201 20:07:08.858872 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.858840277 podStartE2EDuration="2.858840277s" podCreationTimestamp="2025-12-01 20:07:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:08.841842671 +0000 UTC m=+148.768924088" watchObservedRunningTime="2025-12-01 20:07:08.858840277 +0000 UTC m=+148.785921684" Dec 01 20:07:09 crc kubenswrapper[4852]: 
I1201 20:07:09.099128 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:09 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:09 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:09 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.099178 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.853798 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ec15975302328935d25e0744499530872d9e77a0680fdc4af1be44cb03ff27ed"} Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.877067 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d016b1b3e6e90d96c6b531829aa22ba3871a8b8d7826c29d1ff73a110a6d65d8"} Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.877120 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.886833 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a7f0045a67cfca656cf9073e61b688581905d404d888bada9204ef8a0446eb01"} Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.894623 4852 generic.go:334] "Generic (PLEG): container finished" podID="1c18714c-2f46-4e5b-86e2-d25ed0574340" containerID="ed24f9b6d257c2f0b0ab2b60c8eb54c7bff663b367e720112ca40734267d55cb" exitCode=0 Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.894686 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c18714c-2f46-4e5b-86e2-d25ed0574340","Type":"ContainerDied","Data":"ed24f9b6d257c2f0b0ab2b60c8eb54c7bff663b367e720112ca40734267d55cb"} Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.898417 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" event={"ID":"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a","Type":"ContainerStarted","Data":"771bc9e3f0891f7d45b2d5ba62c85c54b9d5fae13e3949b212da08b6e93e8f6a"} Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.898445 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-j2q4c" event={"ID":"7ae4c9c3-3bfa-4958-8365-0b1b4a83b98a","Type":"ContainerStarted","Data":"67d3d8ce8e72c14dd650e0f3510e7c69360d9ad0104178c489a8f3bbeb7cb8d2"} Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.911233 4852 generic.go:334] "Generic (PLEG): container finished" podID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerID="8ef40f8a1ebc1f7b092a8ce8d16fb40f720930f6a8c04b050ab56143246809c3" exitCode=0 Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.911491 
4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk9c" event={"ID":"7bed1d58-1604-4e6f-a217-244cdc5c77e1","Type":"ContainerDied","Data":"8ef40f8a1ebc1f7b092a8ce8d16fb40f720930f6a8c04b050ab56143246809c3"} Dec 01 20:07:09 crc kubenswrapper[4852]: I1201 20:07:09.933242 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-j2q4c" podStartSLOduration=130.93322331 podStartE2EDuration="2m10.93322331s" podCreationTimestamp="2025-12-01 20:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:09.91799249 +0000 UTC m=+149.845073907" watchObservedRunningTime="2025-12-01 20:07:09.93322331 +0000 UTC m=+149.860304727" Dec 01 20:07:10 crc kubenswrapper[4852]: I1201 20:07:10.096627 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:10 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:10 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:10 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:10 crc kubenswrapper[4852]: I1201 20:07:10.096711 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.096962 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 20:07:11 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld Dec 01 20:07:11 crc kubenswrapper[4852]: [+]process-running ok Dec 01 20:07:11 crc kubenswrapper[4852]: healthz check failed Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.097024 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.311273 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.441003 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c18714c-2f46-4e5b-86e2-d25ed0574340-kubelet-dir\") pod \"1c18714c-2f46-4e5b-86e2-d25ed0574340\" (UID: \"1c18714c-2f46-4e5b-86e2-d25ed0574340\") " Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.441162 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c18714c-2f46-4e5b-86e2-d25ed0574340-kube-api-access\") pod \"1c18714c-2f46-4e5b-86e2-d25ed0574340\" (UID: \"1c18714c-2f46-4e5b-86e2-d25ed0574340\") " Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.441555 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c18714c-2f46-4e5b-86e2-d25ed0574340-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1c18714c-2f46-4e5b-86e2-d25ed0574340" (UID: "1c18714c-2f46-4e5b-86e2-d25ed0574340"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.467020 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c18714c-2f46-4e5b-86e2-d25ed0574340-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1c18714c-2f46-4e5b-86e2-d25ed0574340" (UID: "1c18714c-2f46-4e5b-86e2-d25ed0574340"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.543185 4852 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c18714c-2f46-4e5b-86e2-d25ed0574340-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.543230 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c18714c-2f46-4e5b-86e2-d25ed0574340-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.981950 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c18714c-2f46-4e5b-86e2-d25ed0574340","Type":"ContainerDied","Data":"f4e78ee0c545182773bdc84b43d5266c9970eaa51754885a42a87cadb6ed1b1d"} Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.982061 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4e78ee0c545182773bdc84b43d5266c9970eaa51754885a42a87cadb6ed1b1d" Dec 01 20:07:11 crc kubenswrapper[4852]: I1201 20:07:11.982204 4852 util.go:48] "No ready sandbox for pod can be found. 
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.100990 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 20:07:12 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld
Dec 01 20:07:12 crc kubenswrapper[4852]: [+]process-running ok
Dec 01 20:07:12 crc kubenswrapper[4852]: healthz check failed
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.101129 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.179878 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 01 20:07:12 crc kubenswrapper[4852]: E1201 20:07:12.180224 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7b65e8c-0f7e-441d-9183-2090247908eb" containerName="collect-profiles"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.180242 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7b65e8c-0f7e-441d-9183-2090247908eb" containerName="collect-profiles"
Dec 01 20:07:12 crc kubenswrapper[4852]: E1201 20:07:12.180265 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c18714c-2f46-4e5b-86e2-d25ed0574340" containerName="pruner"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.180272 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c18714c-2f46-4e5b-86e2-d25ed0574340" containerName="pruner"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.180383 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7b65e8c-0f7e-441d-9183-2090247908eb" containerName="collect-profiles"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.180400 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c18714c-2f46-4e5b-86e2-d25ed0574340" containerName="pruner"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.183814 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.188161 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.188814 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.193215 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.263798 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0231e787-1a13-46e6-9557-99e2a4a3c05e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"0231e787-1a13-46e6-9557-99e2a4a3c05e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.263898 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0231e787-1a13-46e6-9557-99e2a4a3c05e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"0231e787-1a13-46e6-9557-99e2a4a3c05e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.365085 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0231e787-1a13-46e6-9557-99e2a4a3c05e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"0231e787-1a13-46e6-9557-99e2a4a3c05e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.366402 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0231e787-1a13-46e6-9557-99e2a4a3c05e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"0231e787-1a13-46e6-9557-99e2a4a3c05e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.365393 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0231e787-1a13-46e6-9557-99e2a4a3c05e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"0231e787-1a13-46e6-9557-99e2a4a3c05e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.411212 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0231e787-1a13-46e6-9557-99e2a4a3c05e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"0231e787-1a13-46e6-9557-99e2a4a3c05e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.522145 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 20:07:12 crc kubenswrapper[4852]: I1201 20:07:12.815513 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 01 20:07:13 crc kubenswrapper[4852]: I1201 20:07:13.007268 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0231e787-1a13-46e6-9557-99e2a4a3c05e","Type":"ContainerStarted","Data":"6bf8e84334239468705ca80010f68b409a96cb6dd698e1c2dc858b1b047daca5"}
Dec 01 20:07:13 crc kubenswrapper[4852]: I1201 20:07:13.097790 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 20:07:13 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld
Dec 01 20:07:13 crc kubenswrapper[4852]: [+]process-running ok
Dec 01 20:07:13 crc kubenswrapper[4852]: healthz check failed
Dec 01 20:07:13 crc kubenswrapper[4852]: I1201 20:07:13.097871 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 20:07:13 crc kubenswrapper[4852]: I1201 20:07:13.207106 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-mwxfn"
Dec 01 20:07:13 crc kubenswrapper[4852]: I1201 20:07:13.442612 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-727gr"
Dec 01 20:07:14 crc kubenswrapper[4852]: I1201 20:07:14.035042 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0231e787-1a13-46e6-9557-99e2a4a3c05e","Type":"ContainerStarted","Data":"949213eb2db704c4fa6b7f77fcee95f60bda7e7d349cf13d7521d123a4022a5d"}
Dec 01 20:07:14 crc kubenswrapper[4852]: I1201 20:07:14.055068 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.055032183 podStartE2EDuration="2.055032183s" podCreationTimestamp="2025-12-01 20:07:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:14.053934138 +0000 UTC m=+153.981015555" watchObservedRunningTime="2025-12-01 20:07:14.055032183 +0000 UTC m=+153.982113630"
Dec 01 20:07:14 crc kubenswrapper[4852]: I1201 20:07:14.098658 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 20:07:14 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld
Dec 01 20:07:14 crc kubenswrapper[4852]: [+]process-running ok
Dec 01 20:07:14 crc kubenswrapper[4852]: healthz check failed
Dec 01 20:07:14 crc kubenswrapper[4852]: I1201 20:07:14.098737 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 20:07:15 crc kubenswrapper[4852]: I1201 20:07:15.047269 4852 generic.go:334] "Generic (PLEG): container finished" podID="0231e787-1a13-46e6-9557-99e2a4a3c05e" containerID="949213eb2db704c4fa6b7f77fcee95f60bda7e7d349cf13d7521d123a4022a5d" exitCode=0
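The pod_startup_latency_tracker entries record podStartE2EDuration as observedRunningTime minus podCreationTimestamp; for revision-pruner-8-crc above that is 20:07:14.055032183 minus 20:07:12, i.e. 2.055032183s. A small sketch of that arithmetic, assuming only the timestamp layout shown in the log:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching timestamps like "2025-12-01 20:07:12 +0000 UTC" in the log
	// (fractional seconds are optional with the .999... layout).
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

	created, _ := time.Parse(layout, "2025-12-01 20:07:12 +0000 UTC")
	running, _ := time.Parse(layout, "2025-12-01 20:07:14.055032183 +0000 UTC")

	// podStartE2EDuration = observedRunningTime - podCreationTimestamp
	fmt.Println(running.Sub(created)) // prints 2.055032183s
}

The firstStartedPulling/lastFinishedPulling fields stay at the zero time ("0001-01-01 ...") when no image pull was needed, which is why the SLO duration and the E2E duration coincide for this pod.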
Dec 01 20:07:15 crc kubenswrapper[4852]: I1201 20:07:15.047389 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0231e787-1a13-46e6-9557-99e2a4a3c05e","Type":"ContainerDied","Data":"949213eb2db704c4fa6b7f77fcee95f60bda7e7d349cf13d7521d123a4022a5d"}
Dec 01 20:07:15 crc kubenswrapper[4852]: I1201 20:07:15.096583 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 20:07:15 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld
Dec 01 20:07:15 crc kubenswrapper[4852]: [+]process-running ok
Dec 01 20:07:15 crc kubenswrapper[4852]: healthz check failed
Dec 01 20:07:15 crc kubenswrapper[4852]: I1201 20:07:15.096679 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 20:07:16 crc kubenswrapper[4852]: I1201 20:07:16.098787 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 20:07:16 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld
Dec 01 20:07:16 crc kubenswrapper[4852]: [+]process-running ok
Dec 01 20:07:16 crc kubenswrapper[4852]: healthz check failed
Dec 01 20:07:16 crc kubenswrapper[4852]: I1201 20:07:16.099388 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 20:07:16 crc kubenswrapper[4852]: I1201 20:07:16.415739 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 20:07:16 crc kubenswrapper[4852]: I1201 20:07:16.540572 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0231e787-1a13-46e6-9557-99e2a4a3c05e-kube-api-access\") pod \"0231e787-1a13-46e6-9557-99e2a4a3c05e\" (UID: \"0231e787-1a13-46e6-9557-99e2a4a3c05e\") "
Dec 01 20:07:16 crc kubenswrapper[4852]: I1201 20:07:16.540713 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0231e787-1a13-46e6-9557-99e2a4a3c05e-kubelet-dir\") pod \"0231e787-1a13-46e6-9557-99e2a4a3c05e\" (UID: \"0231e787-1a13-46e6-9557-99e2a4a3c05e\") "
Dec 01 20:07:16 crc kubenswrapper[4852]: I1201 20:07:16.541220 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0231e787-1a13-46e6-9557-99e2a4a3c05e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "0231e787-1a13-46e6-9557-99e2a4a3c05e" (UID: "0231e787-1a13-46e6-9557-99e2a4a3c05e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:07:16 crc kubenswrapper[4852]: I1201 20:07:16.548721 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0231e787-1a13-46e6-9557-99e2a4a3c05e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0231e787-1a13-46e6-9557-99e2a4a3c05e" (UID: "0231e787-1a13-46e6-9557-99e2a4a3c05e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:07:16 crc kubenswrapper[4852]: I1201 20:07:16.642607 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0231e787-1a13-46e6-9557-99e2a4a3c05e-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 01 20:07:16 crc kubenswrapper[4852]: I1201 20:07:16.642671 4852 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0231e787-1a13-46e6-9557-99e2a4a3c05e-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 01 20:07:17 crc kubenswrapper[4852]: I1201 20:07:17.069160 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0231e787-1a13-46e6-9557-99e2a4a3c05e","Type":"ContainerDied","Data":"6bf8e84334239468705ca80010f68b409a96cb6dd698e1c2dc858b1b047daca5"}
Dec 01 20:07:17 crc kubenswrapper[4852]: I1201 20:07:17.069563 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6bf8e84334239468705ca80010f68b409a96cb6dd698e1c2dc858b1b047daca5"
Dec 01 20:07:17 crc kubenswrapper[4852]: I1201 20:07:17.069641 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 20:07:17 crc kubenswrapper[4852]: I1201 20:07:17.099417 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 20:07:17 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld
Dec 01 20:07:17 crc kubenswrapper[4852]: [+]process-running ok
Dec 01 20:07:17 crc kubenswrapper[4852]: healthz check failed
Dec 01 20:07:17 crc kubenswrapper[4852]: I1201 20:07:17.099685 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 20:07:17 crc kubenswrapper[4852]: I1201 20:07:17.643360 4852 patch_prober.go:28] interesting pod/console-f9d7485db-glnwk container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Dec 01 20:07:17 crc kubenswrapper[4852]: I1201 20:07:17.643537 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-glnwk" podUID="ab102fcf-71d9-40fc-9b9d-79b697e7864c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused"
Dec 01 20:07:18 crc kubenswrapper[4852]: I1201 20:07:18.095596 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 20:07:18 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld
Dec 01 20:07:18 crc kubenswrapper[4852]: [+]process-running ok
Dec 01 20:07:18 crc kubenswrapper[4852]: healthz check failed
Dec 01 20:07:18 crc kubenswrapper[4852]: I1201 20:07:18.095659 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 20:07:19 crc kubenswrapper[4852]: I1201 20:07:19.097083 4852 patch_prober.go:28] interesting pod/router-default-5444994796-pvvl9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 20:07:19 crc kubenswrapper[4852]: [-]has-synced failed: reason withheld
Dec 01 20:07:19 crc kubenswrapper[4852]: [+]process-running ok
Dec 01 20:07:19 crc kubenswrapper[4852]: healthz check failed
Dec 01 20:07:19 crc kubenswrapper[4852]: I1201 20:07:19.097505 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pvvl9" podUID="93085d90-0464-4c0c-9908-237d7cb85b24" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 20:07:20 crc kubenswrapper[4852]: I1201 20:07:20.099633 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-pvvl9"
Dec 01 20:07:20 crc kubenswrapper[4852]: I1201 20:07:20.101900 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-pvvl9"
Dec 01 20:07:20 crc kubenswrapper[4852]: I1201 20:07:20.229860 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 20:07:20 crc kubenswrapper[4852]: I1201 20:07:20.229943 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 20:07:25 crc kubenswrapper[4852]: I1201 20:07:25.784280 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql"
Dec 01 20:07:27 crc kubenswrapper[4852]: I1201 20:07:27.681149 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-glnwk"
Dec 01 20:07:27 crc kubenswrapper[4852]: I1201 20:07:27.685919 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-glnwk"
Dec 01 20:07:36 crc kubenswrapper[4852]: E1201 20:07:36.220133 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
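The reconciler_common and operation_generator entries in this stretch trace the volume manager's teardown pattern: for each volume that is mounted but no longer desired it logs UnmountVolume started, runs the plugin's TearDown, and only then reports the volume detached. A toy Go sketch of that desired-state/actual-state loop (types and messages are illustrative, not kubelet's actual data structures):

package main

import "fmt"

// volume identifies one mount in the actual-state-of-world.
type volume struct{ name, pod string }

// tearDown stands in for the plugin-specific cleanup
// (host-path, projected, empty-dir, ...).
func tearDown(v volume) error {
	fmt.Printf("UnmountVolume.TearDown succeeded for volume %q pod %q\n", v.name, v.pod)
	return nil
}

// reconcile unmounts anything mounted that is no longer desired.
func reconcile(desired, actual map[volume]bool) {
	for v := range actual {
		if desired[v] {
			continue // still needed by a pod, leave mounted
		}
		fmt.Printf("operationExecutor.UnmountVolume started for volume %q pod %q\n", v.name, v.pod)
		if err := tearDown(v); err != nil {
			continue // keep it in actual state; retry next sync
		}
		delete(actual, v) // only now does it count as detached
		fmt.Printf("Volume detached for volume %q\n", v.name)
	}
}

func main() {
	mounted := map[volume]bool{
		{"kubelet-dir", "revision-pruner-8-crc"}:     true,
		{"kube-api-access", "revision-pruner-8-crc"}: true,
	}
	reconcile(map[volume]bool{}, mounted) // pod deleted: nothing is desired anymore
}

Because "detached" is only recorded after TearDown succeeds, a failed teardown is simply retried on the next sync, which is why the log always shows the three messages in that strict order per volume.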
Dec 01 20:07:36 crc kubenswrapper[4852]: E1201 20:07:36.220736 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x8tnt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-4vvkx_openshift-marketplace(bb095312-fe29-458a-b5d5-8fd607a89e8b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 01 20:07:36 crc kubenswrapper[4852]: E1201 20:07:36.222039 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-4vvkx" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b"
Dec 01 20:07:38 crc kubenswrapper[4852]: I1201 20:07:38.433785 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-56wb9"
Dec 01 20:07:39 crc kubenswrapper[4852]: E1201 20:07:39.149849 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-4vvkx" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b"
Dec 01 20:07:39 crc kubenswrapper[4852]: E1201 20:07:39.244169 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 01 20:07:39 crc kubenswrapper[4852]: E1201 20:07:39.245170 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kwxm4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-sr6w6_openshift-marketplace(7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 01 20:07:39 crc kubenswrapper[4852]: E1201 20:07:39.246705 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-sr6w6" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8"
Dec 01 20:07:40 crc kubenswrapper[4852]: E1201 20:07:40.106179 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 01 20:07:40 crc kubenswrapper[4852]: E1201 20:07:40.106848 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-glmlr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-f5255_openshift-marketplace(f9e7ffee-9028-45cb-83bd-3d5a0019ab16): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 01 20:07:40 crc kubenswrapper[4852]: E1201 20:07:40.110326 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-f5255" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16"
Dec 01 20:07:40 crc kubenswrapper[4852]: E1201 20:07:40.181680 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 01 20:07:40 crc kubenswrapper[4852]: E1201 20:07:40.182120 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cpwzh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-sl8rl_openshift-marketplace(f60b7d08-82e1-4c52-a49b-81ef0fd12bff): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 01 20:07:40 crc kubenswrapper[4852]: E1201 20:07:40.183929 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-sl8rl" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff"
Dec 01 20:07:40 crc kubenswrapper[4852]: E1201 20:07:40.211115 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-f5255" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16"
Dec 01 20:07:40 crc kubenswrapper[4852]: E1201 20:07:40.211837 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-sl8rl" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff"
Dec 01 20:07:40 crc kubenswrapper[4852]: E1201 20:07:40.214360 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-sr6w6" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8"
Dec 01 20:07:41 crc kubenswrapper[4852]: I1201 20:07:41.220814 4852 generic.go:334] "Generic (PLEG): container finished" podID="10a3a8e7-980e-4015-9418-f0854d431b85" containerID="c5a91396e66eeacac4df8acca894570eb56a767e903d28fe192e4812c2b5b1dc" exitCode=0
Dec 01 20:07:41 crc kubenswrapper[4852]: I1201 20:07:41.220879 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dwvx7" event={"ID":"10a3a8e7-980e-4015-9418-f0854d431b85","Type":"ContainerDied","Data":"c5a91396e66eeacac4df8acca894570eb56a767e903d28fe192e4812c2b5b1dc"}
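Each failed pull above moves the pod from ErrImagePull (the attempt itself failed) to ImagePullBackOff (the kubelet is waiting out an exponential back-off before retrying). A sketch of that retry policy follows; the 10s base doubling to a 5m cap mirrors kubelet's image back-off defaults as I understand them, so treat both constants as assumptions:

package main

import (
	"errors"
	"fmt"
	"time"
)

// pullWithBackOff retries an image pull, doubling the wait between
// attempts up to maxDelay, in the spirit of kubelet's image back-off.
func pullWithBackOff(pull func() error, base, maxDelay time.Duration, attempts int) error {
	delay := base
	for i := 0; i < attempts; i++ {
		if err := pull(); err == nil {
			return nil // pull succeeded; back-off resets
		}
		// While waiting, the pod surfaces as ImagePullBackOff.
		fmt.Printf("ErrImagePull; back-off %v before next attempt\n", delay)
		time.Sleep(delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
	return errors.New("image pull kept failing")
}

func main() {
	fail := func() error { return errors.New("context canceled") } // mirrors the log's pull error
	pullWithBackOff(fail, 10*time.Second, 5*time.Minute, 3)
}

That alternation is exactly what the pod_workers entries show: an ErrImagePull record when a pull attempt fails, then ImagePullBackOff records on subsequent syncs while the delay has not yet elapsed.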
pod="openshift-marketplace/redhat-operators-dwvx7" event={"ID":"10a3a8e7-980e-4015-9418-f0854d431b85","Type":"ContainerDied","Data":"c5a91396e66eeacac4df8acca894570eb56a767e903d28fe192e4812c2b5b1dc"} Dec 01 20:07:41 crc kubenswrapper[4852]: I1201 20:07:41.226440 4852 generic.go:334] "Generic (PLEG): container finished" podID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerID="20c8d4ae4fc2b12d0673053d3737505bd776f5757b78ae95c26d48bcb2bbd352" exitCode=0 Dec 01 20:07:41 crc kubenswrapper[4852]: I1201 20:07:41.226568 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk9c" event={"ID":"7bed1d58-1604-4e6f-a217-244cdc5c77e1","Type":"ContainerDied","Data":"20c8d4ae4fc2b12d0673053d3737505bd776f5757b78ae95c26d48bcb2bbd352"} Dec 01 20:07:41 crc kubenswrapper[4852]: I1201 20:07:41.229628 4852 generic.go:334] "Generic (PLEG): container finished" podID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerID="3f3e6304abe98d73fc173c9f0274d337e07253c8dfd3cefb921d990771edec58" exitCode=0 Dec 01 20:07:41 crc kubenswrapper[4852]: I1201 20:07:41.229846 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9pz94" event={"ID":"c30293cc-0c33-4001-9e98-82d881fb74d8","Type":"ContainerDied","Data":"3f3e6304abe98d73fc173c9f0274d337e07253c8dfd3cefb921d990771edec58"} Dec 01 20:07:41 crc kubenswrapper[4852]: I1201 20:07:41.233620 4852 generic.go:334] "Generic (PLEG): container finished" podID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerID="33a52364302e089cb24ba73405b3dad0f5ea7d312fd6a559056ae64de1f3a949" exitCode=0 Dec 01 20:07:41 crc kubenswrapper[4852]: I1201 20:07:41.234595 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxthp" event={"ID":"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e","Type":"ContainerDied","Data":"33a52364302e089cb24ba73405b3dad0f5ea7d312fd6a559056ae64de1f3a949"} Dec 01 20:07:43 crc kubenswrapper[4852]: I1201 20:07:43.253425 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9pz94" event={"ID":"c30293cc-0c33-4001-9e98-82d881fb74d8","Type":"ContainerStarted","Data":"96670ad37131c99deab094770f0a55de0cb569b5326148e5b062f50b741cb8a1"} Dec 01 20:07:43 crc kubenswrapper[4852]: I1201 20:07:43.258255 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxthp" event={"ID":"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e","Type":"ContainerStarted","Data":"f653dcfe30a3457db1657ab3866adc036598a6649aa96007c674135d802f15ab"} Dec 01 20:07:43 crc kubenswrapper[4852]: I1201 20:07:43.261354 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dwvx7" event={"ID":"10a3a8e7-980e-4015-9418-f0854d431b85","Type":"ContainerStarted","Data":"9c8a582c46ade620c64b428895e727abe7c17878049b24a52bfda4aaab4c0772"} Dec 01 20:07:43 crc kubenswrapper[4852]: I1201 20:07:43.263820 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk9c" event={"ID":"7bed1d58-1604-4e6f-a217-244cdc5c77e1","Type":"ContainerStarted","Data":"0296f47f1ff3c88fafd254d07fa02e7853958f02b51b2fe63674265f1b997f30"} Dec 01 20:07:43 crc kubenswrapper[4852]: I1201 20:07:43.279367 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9pz94" podStartSLOduration=2.5971100480000002 podStartE2EDuration="39.27933958s" podCreationTimestamp="2025-12-01 20:07:04 +0000 UTC" 
firstStartedPulling="2025-12-01 20:07:05.641954993 +0000 UTC m=+145.569036410" lastFinishedPulling="2025-12-01 20:07:42.324184525 +0000 UTC m=+182.251265942" observedRunningTime="2025-12-01 20:07:43.278066399 +0000 UTC m=+183.205147826" watchObservedRunningTime="2025-12-01 20:07:43.27933958 +0000 UTC m=+183.206420997" Dec 01 20:07:43 crc kubenswrapper[4852]: I1201 20:07:43.296913 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dwvx7" podStartSLOduration=3.7310751030000002 podStartE2EDuration="37.296890284s" podCreationTimestamp="2025-12-01 20:07:06 +0000 UTC" firstStartedPulling="2025-12-01 20:07:08.835759765 +0000 UTC m=+148.762841182" lastFinishedPulling="2025-12-01 20:07:42.401574946 +0000 UTC m=+182.328656363" observedRunningTime="2025-12-01 20:07:43.293748243 +0000 UTC m=+183.220829660" watchObservedRunningTime="2025-12-01 20:07:43.296890284 +0000 UTC m=+183.223971701" Dec 01 20:07:43 crc kubenswrapper[4852]: I1201 20:07:43.311506 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tqk9c" podStartSLOduration=2.993108087 podStartE2EDuration="36.311487154s" podCreationTimestamp="2025-12-01 20:07:07 +0000 UTC" firstStartedPulling="2025-12-01 20:07:08.844966651 +0000 UTC m=+148.772048068" lastFinishedPulling="2025-12-01 20:07:42.163345718 +0000 UTC m=+182.090427135" observedRunningTime="2025-12-01 20:07:43.310971588 +0000 UTC m=+183.238053005" watchObservedRunningTime="2025-12-01 20:07:43.311487154 +0000 UTC m=+183.238568561" Dec 01 20:07:43 crc kubenswrapper[4852]: I1201 20:07:43.333366 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sxthp" podStartSLOduration=3.729952212 podStartE2EDuration="40.333339538s" podCreationTimestamp="2025-12-01 20:07:03 +0000 UTC" firstStartedPulling="2025-12-01 20:07:05.636569598 +0000 UTC m=+145.563651015" lastFinishedPulling="2025-12-01 20:07:42.239956914 +0000 UTC m=+182.167038341" observedRunningTime="2025-12-01 20:07:43.33090581 +0000 UTC m=+183.257987227" watchObservedRunningTime="2025-12-01 20:07:43.333339538 +0000 UTC m=+183.260420955" Dec 01 20:07:43 crc kubenswrapper[4852]: I1201 20:07:43.638365 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-n7xgz"] Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.173515 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 20:07:44 crc kubenswrapper[4852]: E1201 20:07:44.173892 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0231e787-1a13-46e6-9557-99e2a4a3c05e" containerName="pruner" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.173917 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0231e787-1a13-46e6-9557-99e2a4a3c05e" containerName="pruner" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.174081 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0231e787-1a13-46e6-9557-99e2a4a3c05e" containerName="pruner" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.174613 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.180083 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.180342 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.190588 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.209284 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.209331 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.250665 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.250754 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.352302 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.352750 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.352527 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.375581 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.491159 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.514386 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.514442 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.563242 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:44 crc kubenswrapper[4852]: I1201 20:07:44.914938 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 20:07:45 crc kubenswrapper[4852]: I1201 20:07:45.276134 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-sxthp" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerName="registry-server" probeResult="failure" output=< Dec 01 20:07:45 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s Dec 01 20:07:45 crc kubenswrapper[4852]: > Dec 01 20:07:45 crc kubenswrapper[4852]: I1201 20:07:45.295742 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd","Type":"ContainerStarted","Data":"a892c095ddc5ac40cd89c117ada58c2858eebdf9b19e29b10e5b9db5924cdfdb"} Dec 01 20:07:46 crc kubenswrapper[4852]: I1201 20:07:46.304642 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd","Type":"ContainerStarted","Data":"b522969691b1f81dae1a84ab00d1fb74a5bb55b28ad354ec0673fafeb5505326"} Dec 01 20:07:46 crc kubenswrapper[4852]: I1201 20:07:46.324972 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.324930172 podStartE2EDuration="2.324930172s" podCreationTimestamp="2025-12-01 20:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:46.32113694 +0000 UTC m=+186.248218357" watchObservedRunningTime="2025-12-01 20:07:46.324930172 +0000 UTC m=+186.252011589" Dec 01 20:07:47 crc kubenswrapper[4852]: I1201 20:07:47.112217 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:47 crc kubenswrapper[4852]: I1201 20:07:47.112285 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:47 crc kubenswrapper[4852]: I1201 20:07:47.311922 4852 generic.go:334] "Generic (PLEG): container finished" podID="26ef9bc4-15e4-4133-8f33-b384a3ed2bdd" containerID="b522969691b1f81dae1a84ab00d1fb74a5bb55b28ad354ec0673fafeb5505326" exitCode=0 Dec 01 20:07:47 crc kubenswrapper[4852]: I1201 20:07:47.312094 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd","Type":"ContainerDied","Data":"b522969691b1f81dae1a84ab00d1fb74a5bb55b28ad354ec0673fafeb5505326"} Dec 01 20:07:47 crc kubenswrapper[4852]: I1201 20:07:47.514149 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
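The registry-server startup probes in this stretch fail with "timeout: failed to connect service \":50051\" within 1s", meaning the checker could not even open a connection to the catalog pod's gRPC port before its deadline. A stdlib Go sketch of that connect-within-deadline check (the real probe is a gRPC health check against :50051; plain TCP here is a deliberate simplification):

package main

import (
	"fmt"
	"net"
	"os"
	"time"
)

// connectWithin reports whether a TCP connection to addr can be
// established within d; this connect step is what fails in the log
// ("failed to connect service ... within 1s").
func connectWithin(addr string, d time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, d)
	if err != nil {
		return fmt.Errorf("timeout: failed to connect service %q within %v", addr, d)
	}
	return conn.Close()
}

func main() {
	if err := connectWithin(":50051", time.Second); err != nil {
		fmt.Println(err)
		os.Exit(1) // non-zero exit marks this probe attempt as failed
	}
	fmt.Println("ok")
}

These failures are expected while the registry-server is still loading its extracted catalog; the "startup started" probe transitions later in the log mark the moment the port finally accepts connections.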
pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:47 crc kubenswrapper[4852]: I1201 20:07:47.514199 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:47 crc kubenswrapper[4852]: I1201 20:07:47.641853 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 20:07:48 crc kubenswrapper[4852]: I1201 20:07:48.157227 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dwvx7" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" containerName="registry-server" probeResult="failure" output=< Dec 01 20:07:48 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s Dec 01 20:07:48 crc kubenswrapper[4852]: > Dec 01 20:07:48 crc kubenswrapper[4852]: I1201 20:07:48.557911 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tqk9c" podUID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerName="registry-server" probeResult="failure" output=< Dec 01 20:07:48 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s Dec 01 20:07:48 crc kubenswrapper[4852]: > Dec 01 20:07:48 crc kubenswrapper[4852]: I1201 20:07:48.648866 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 20:07:48 crc kubenswrapper[4852]: I1201 20:07:48.723182 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kubelet-dir\") pod \"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd\" (UID: \"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd\") " Dec 01 20:07:48 crc kubenswrapper[4852]: I1201 20:07:48.723581 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kube-api-access\") pod \"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd\" (UID: \"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd\") " Dec 01 20:07:48 crc kubenswrapper[4852]: I1201 20:07:48.724979 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "26ef9bc4-15e4-4133-8f33-b384a3ed2bdd" (UID: "26ef9bc4-15e4-4133-8f33-b384a3ed2bdd"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:07:48 crc kubenswrapper[4852]: I1201 20:07:48.733374 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "26ef9bc4-15e4-4133-8f33-b384a3ed2bdd" (UID: "26ef9bc4-15e4-4133-8f33-b384a3ed2bdd"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:07:48 crc kubenswrapper[4852]: I1201 20:07:48.826618 4852 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:48 crc kubenswrapper[4852]: I1201 20:07:48.826662 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/26ef9bc4-15e4-4133-8f33-b384a3ed2bdd-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:49 crc kubenswrapper[4852]: I1201 20:07:49.336508 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"26ef9bc4-15e4-4133-8f33-b384a3ed2bdd","Type":"ContainerDied","Data":"a892c095ddc5ac40cd89c117ada58c2858eebdf9b19e29b10e5b9db5924cdfdb"} Dec 01 20:07:49 crc kubenswrapper[4852]: I1201 20:07:49.336589 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a892c095ddc5ac40cd89c117ada58c2858eebdf9b19e29b10e5b9db5924cdfdb" Dec 01 20:07:49 crc kubenswrapper[4852]: I1201 20:07:49.336870 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 20:07:50 crc kubenswrapper[4852]: I1201 20:07:50.229595 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:07:50 crc kubenswrapper[4852]: I1201 20:07:50.229677 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.772040 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 20:07:51 crc kubenswrapper[4852]: E1201 20:07:51.772592 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26ef9bc4-15e4-4133-8f33-b384a3ed2bdd" containerName="pruner" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.772604 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="26ef9bc4-15e4-4133-8f33-b384a3ed2bdd" containerName="pruner" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.772705 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="26ef9bc4-15e4-4133-8f33-b384a3ed2bdd" containerName="pruner" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.773263 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.775658 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.776776 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.788241 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.874714 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-kubelet-dir\") pod \"installer-9-crc\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.874812 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-var-lock\") pod \"installer-9-crc\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.874899 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eb1af6df-557b-4088-8ea4-6f1940ddd885-kube-api-access\") pod \"installer-9-crc\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.975800 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-var-lock\") pod \"installer-9-crc\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.975884 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eb1af6df-557b-4088-8ea4-6f1940ddd885-kube-api-access\") pod \"installer-9-crc\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.975934 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-kubelet-dir\") pod \"installer-9-crc\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.975989 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-var-lock\") pod \"installer-9-crc\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.976025 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-kubelet-dir\") pod \"installer-9-crc\" (UID: 
\"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:51 crc kubenswrapper[4852]: I1201 20:07:51.997760 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eb1af6df-557b-4088-8ea4-6f1940ddd885-kube-api-access\") pod \"installer-9-crc\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:52 crc kubenswrapper[4852]: I1201 20:07:52.092868 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:07:52 crc kubenswrapper[4852]: I1201 20:07:52.489301 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 20:07:53 crc kubenswrapper[4852]: I1201 20:07:53.470404 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"eb1af6df-557b-4088-8ea4-6f1940ddd885","Type":"ContainerStarted","Data":"c1106c221fbfea3d41c35685fa944061e53f4fe9a3b81032f136ca90bca9fb26"} Dec 01 20:07:54 crc kubenswrapper[4852]: I1201 20:07:54.255077 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:54 crc kubenswrapper[4852]: I1201 20:07:54.295242 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:07:54 crc kubenswrapper[4852]: I1201 20:07:54.553495 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:07:55 crc kubenswrapper[4852]: I1201 20:07:55.481848 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"eb1af6df-557b-4088-8ea4-6f1940ddd885","Type":"ContainerStarted","Data":"64e1052e537a1aceaf673534962df52d48e33aa8e6a775aeabf65951169081cc"} Dec 01 20:07:55 crc kubenswrapper[4852]: I1201 20:07:55.503614 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=4.5035821160000005 podStartE2EDuration="4.503582116s" podCreationTimestamp="2025-12-01 20:07:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:07:55.497044545 +0000 UTC m=+195.424125962" watchObservedRunningTime="2025-12-01 20:07:55.503582116 +0000 UTC m=+195.430663563" Dec 01 20:07:56 crc kubenswrapper[4852]: I1201 20:07:56.490498 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9pz94"] Dec 01 20:07:56 crc kubenswrapper[4852]: I1201 20:07:56.490700 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9pz94" podUID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerName="registry-server" containerID="cri-o://96670ad37131c99deab094770f0a55de0cb569b5326148e5b062f50b741cb8a1" gracePeriod=2 Dec 01 20:07:57 crc kubenswrapper[4852]: I1201 20:07:57.210015 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:57 crc kubenswrapper[4852]: I1201 20:07:57.258037 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:07:57 crc 
kubenswrapper[4852]: I1201 20:07:57.505783 4852 generic.go:334] "Generic (PLEG): container finished" podID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerID="96670ad37131c99deab094770f0a55de0cb569b5326148e5b062f50b741cb8a1" exitCode=0 Dec 01 20:07:57 crc kubenswrapper[4852]: I1201 20:07:57.505861 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9pz94" event={"ID":"c30293cc-0c33-4001-9e98-82d881fb74d8","Type":"ContainerDied","Data":"96670ad37131c99deab094770f0a55de0cb569b5326148e5b062f50b741cb8a1"} Dec 01 20:07:57 crc kubenswrapper[4852]: I1201 20:07:57.619993 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:57 crc kubenswrapper[4852]: I1201 20:07:57.658016 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:07:59 crc kubenswrapper[4852]: I1201 20:07:59.493200 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tqk9c"] Dec 01 20:07:59 crc kubenswrapper[4852]: I1201 20:07:59.534018 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tqk9c" podUID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerName="registry-server" containerID="cri-o://0296f47f1ff3c88fafd254d07fa02e7853958f02b51b2fe63674265f1b997f30" gracePeriod=2 Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.577936 4852 generic.go:334] "Generic (PLEG): container finished" podID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerID="0296f47f1ff3c88fafd254d07fa02e7853958f02b51b2fe63674265f1b997f30" exitCode=0 Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.578893 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk9c" event={"ID":"7bed1d58-1604-4e6f-a217-244cdc5c77e1","Type":"ContainerDied","Data":"0296f47f1ff3c88fafd254d07fa02e7853958f02b51b2fe63674265f1b997f30"} Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.643377 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.658410 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.722919 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrnm9\" (UniqueName: \"kubernetes.io/projected/c30293cc-0c33-4001-9e98-82d881fb74d8-kube-api-access-wrnm9\") pod \"c30293cc-0c33-4001-9e98-82d881fb74d8\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.722985 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-utilities\") pod \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.723006 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-utilities\") pod \"c30293cc-0c33-4001-9e98-82d881fb74d8\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.723024 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-catalog-content\") pod \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.723105 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-catalog-content\") pod \"c30293cc-0c33-4001-9e98-82d881fb74d8\" (UID: \"c30293cc-0c33-4001-9e98-82d881fb74d8\") " Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.723126 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dj7lm\" (UniqueName: \"kubernetes.io/projected/7bed1d58-1604-4e6f-a217-244cdc5c77e1-kube-api-access-dj7lm\") pod \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\" (UID: \"7bed1d58-1604-4e6f-a217-244cdc5c77e1\") " Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.724120 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-utilities" (OuterVolumeSpecName: "utilities") pod "7bed1d58-1604-4e6f-a217-244cdc5c77e1" (UID: "7bed1d58-1604-4e6f-a217-244cdc5c77e1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.724657 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-utilities" (OuterVolumeSpecName: "utilities") pod "c30293cc-0c33-4001-9e98-82d881fb74d8" (UID: "c30293cc-0c33-4001-9e98-82d881fb74d8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.732675 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c30293cc-0c33-4001-9e98-82d881fb74d8-kube-api-access-wrnm9" (OuterVolumeSpecName: "kube-api-access-wrnm9") pod "c30293cc-0c33-4001-9e98-82d881fb74d8" (UID: "c30293cc-0c33-4001-9e98-82d881fb74d8"). InnerVolumeSpecName "kube-api-access-wrnm9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.732791 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bed1d58-1604-4e6f-a217-244cdc5c77e1-kube-api-access-dj7lm" (OuterVolumeSpecName: "kube-api-access-dj7lm") pod "7bed1d58-1604-4e6f-a217-244cdc5c77e1" (UID: "7bed1d58-1604-4e6f-a217-244cdc5c77e1"). InnerVolumeSpecName "kube-api-access-dj7lm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.792667 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c30293cc-0c33-4001-9e98-82d881fb74d8" (UID: "c30293cc-0c33-4001-9e98-82d881fb74d8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.824427 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.824488 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dj7lm\" (UniqueName: \"kubernetes.io/projected/7bed1d58-1604-4e6f-a217-244cdc5c77e1-kube-api-access-dj7lm\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.824505 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrnm9\" (UniqueName: \"kubernetes.io/projected/c30293cc-0c33-4001-9e98-82d881fb74d8-kube-api-access-wrnm9\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.824516 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.824529 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c30293cc-0c33-4001-9e98-82d881fb74d8-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.855619 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7bed1d58-1604-4e6f-a217-244cdc5c77e1" (UID: "7bed1d58-1604-4e6f-a217-244cdc5c77e1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:01 crc kubenswrapper[4852]: I1201 20:08:01.927648 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bed1d58-1604-4e6f-a217-244cdc5c77e1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.594668 4852 generic.go:334] "Generic (PLEG): container finished" podID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerID="b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951" exitCode=0 Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.594837 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sr6w6" event={"ID":"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8","Type":"ContainerDied","Data":"b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951"} Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.597949 4852 generic.go:334] "Generic (PLEG): container finished" podID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerID="30f9864541ab917d68fc183052c354e69ff3e0a1b2dd69c7a8704ab9c701de5d" exitCode=0 Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.598028 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4vvkx" event={"ID":"bb095312-fe29-458a-b5d5-8fd607a89e8b","Type":"ContainerDied","Data":"30f9864541ab917d68fc183052c354e69ff3e0a1b2dd69c7a8704ab9c701de5d"} Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.601142 4852 generic.go:334] "Generic (PLEG): container finished" podID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerID="8d304bfd3e07a2ab81242de286f6fb9bb52ed095649a9aff9821cfeda584f561" exitCode=0 Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.601306 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5255" event={"ID":"f9e7ffee-9028-45cb-83bd-3d5a0019ab16","Type":"ContainerDied","Data":"8d304bfd3e07a2ab81242de286f6fb9bb52ed095649a9aff9821cfeda584f561"} Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.606488 4852 generic.go:334] "Generic (PLEG): container finished" podID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerID="4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec" exitCode=0 Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.606569 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sl8rl" event={"ID":"f60b7d08-82e1-4c52-a49b-81ef0fd12bff","Type":"ContainerDied","Data":"4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec"} Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.612654 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk9c" event={"ID":"7bed1d58-1604-4e6f-a217-244cdc5c77e1","Type":"ContainerDied","Data":"bb698140e4370234679ce2459e42f120ac53b2dae3b0345253e29487cf078053"} Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.612730 4852 scope.go:117] "RemoveContainer" containerID="0296f47f1ff3c88fafd254d07fa02e7853958f02b51b2fe63674265f1b997f30" Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.612799 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tqk9c" Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.618358 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9pz94" event={"ID":"c30293cc-0c33-4001-9e98-82d881fb74d8","Type":"ContainerDied","Data":"3bd3a101db5d908955ed2a71bc8ae57d0cbf03b87d94735042e16c963f1deddb"} Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.618642 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9pz94" Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.643447 4852 scope.go:117] "RemoveContainer" containerID="20c8d4ae4fc2b12d0673053d3737505bd776f5757b78ae95c26d48bcb2bbd352" Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.665913 4852 scope.go:117] "RemoveContainer" containerID="8ef40f8a1ebc1f7b092a8ce8d16fb40f720930f6a8c04b050ab56143246809c3" Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.674690 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tqk9c"] Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.683630 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tqk9c"] Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.691909 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9pz94"] Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.699156 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9pz94"] Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.702810 4852 scope.go:117] "RemoveContainer" containerID="96670ad37131c99deab094770f0a55de0cb569b5326148e5b062f50b741cb8a1" Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.726728 4852 scope.go:117] "RemoveContainer" containerID="3f3e6304abe98d73fc173c9f0274d337e07253c8dfd3cefb921d990771edec58" Dec 01 20:08:02 crc kubenswrapper[4852]: I1201 20:08:02.750159 4852 scope.go:117] "RemoveContainer" containerID="7f3ca154688157d27613973d270f5ce9d2f396ed397d6df58f9d764a9ffb673b" Dec 01 20:08:03 crc kubenswrapper[4852]: I1201 20:08:03.629085 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4vvkx" event={"ID":"bb095312-fe29-458a-b5d5-8fd607a89e8b","Type":"ContainerStarted","Data":"6a9e849c71a1551776da545d971828103075896b765751cc6254ec6cbe5e3799"} Dec 01 20:08:03 crc kubenswrapper[4852]: I1201 20:08:03.633385 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5255" event={"ID":"f9e7ffee-9028-45cb-83bd-3d5a0019ab16","Type":"ContainerStarted","Data":"59ee59a57f543518007089b4cccadf68e38d464be8ceb142f132610404d5fc02"} Dec 01 20:08:03 crc kubenswrapper[4852]: I1201 20:08:03.636032 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sl8rl" event={"ID":"f60b7d08-82e1-4c52-a49b-81ef0fd12bff","Type":"ContainerStarted","Data":"c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891"} Dec 01 20:08:03 crc kubenswrapper[4852]: I1201 20:08:03.640696 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sr6w6" event={"ID":"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8","Type":"ContainerStarted","Data":"212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229"} Dec 01 20:08:03 crc kubenswrapper[4852]: I1201 20:08:03.671662 4852 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sr6w6" podStartSLOduration=3.085606273 podStartE2EDuration="1m0.671641578s" podCreationTimestamp="2025-12-01 20:07:03 +0000 UTC" firstStartedPulling="2025-12-01 20:07:05.647513692 +0000 UTC m=+145.574595109" lastFinishedPulling="2025-12-01 20:08:03.233549007 +0000 UTC m=+203.160630414" observedRunningTime="2025-12-01 20:08:03.668781345 +0000 UTC m=+203.595862772" watchObservedRunningTime="2025-12-01 20:08:03.671641578 +0000 UTC m=+203.598722995" Dec 01 20:08:03 crc kubenswrapper[4852]: I1201 20:08:03.671975 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4vvkx" podStartSLOduration=3.058413515 podStartE2EDuration="1m0.671971279s" podCreationTimestamp="2025-12-01 20:07:03 +0000 UTC" firstStartedPulling="2025-12-01 20:07:05.633432888 +0000 UTC m=+145.560514305" lastFinishedPulling="2025-12-01 20:08:03.246990652 +0000 UTC m=+203.174072069" observedRunningTime="2025-12-01 20:08:03.653221931 +0000 UTC m=+203.580303358" watchObservedRunningTime="2025-12-01 20:08:03.671971279 +0000 UTC m=+203.599052696" Dec 01 20:08:03 crc kubenswrapper[4852]: I1201 20:08:03.688362 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f5255" podStartSLOduration=2.019756788 podStartE2EDuration="58.68833567s" podCreationTimestamp="2025-12-01 20:07:05 +0000 UTC" firstStartedPulling="2025-12-01 20:07:06.675246042 +0000 UTC m=+146.602327459" lastFinishedPulling="2025-12-01 20:08:03.343824924 +0000 UTC m=+203.270906341" observedRunningTime="2025-12-01 20:08:03.686108928 +0000 UTC m=+203.613190345" watchObservedRunningTime="2025-12-01 20:08:03.68833567 +0000 UTC m=+203.615417087" Dec 01 20:08:03 crc kubenswrapper[4852]: I1201 20:08:03.711717 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sl8rl" podStartSLOduration=3.196113376 podStartE2EDuration="58.711691988s" podCreationTimestamp="2025-12-01 20:07:05 +0000 UTC" firstStartedPulling="2025-12-01 20:07:07.759804062 +0000 UTC m=+147.686885479" lastFinishedPulling="2025-12-01 20:08:03.275382674 +0000 UTC m=+203.202464091" observedRunningTime="2025-12-01 20:08:03.70773317 +0000 UTC m=+203.634814587" watchObservedRunningTime="2025-12-01 20:08:03.711691988 +0000 UTC m=+203.638773405" Dec 01 20:08:03 crc kubenswrapper[4852]: I1201 20:08:03.962476 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:08:03 crc kubenswrapper[4852]: I1201 20:08:03.962599 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:08:04 crc kubenswrapper[4852]: I1201 20:08:04.309446 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:08:04 crc kubenswrapper[4852]: I1201 20:08:04.310003 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:08:04 crc kubenswrapper[4852]: I1201 20:08:04.326575 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" path="/var/lib/kubelet/pods/7bed1d58-1604-4e6f-a217-244cdc5c77e1/volumes" Dec 01 20:08:04 crc kubenswrapper[4852]: I1201 20:08:04.327224 4852 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c30293cc-0c33-4001-9e98-82d881fb74d8" path="/var/lib/kubelet/pods/c30293cc-0c33-4001-9e98-82d881fb74d8/volumes" Dec 01 20:08:05 crc kubenswrapper[4852]: I1201 20:08:05.005051 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-4vvkx" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerName="registry-server" probeResult="failure" output=< Dec 01 20:08:05 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s Dec 01 20:08:05 crc kubenswrapper[4852]: > Dec 01 20:08:05 crc kubenswrapper[4852]: I1201 20:08:05.350629 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-sr6w6" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerName="registry-server" probeResult="failure" output=< Dec 01 20:08:05 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s Dec 01 20:08:05 crc kubenswrapper[4852]: > Dec 01 20:08:05 crc kubenswrapper[4852]: I1201 20:08:05.891536 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:08:05 crc kubenswrapper[4852]: I1201 20:08:05.891624 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:08:05 crc kubenswrapper[4852]: I1201 20:08:05.953696 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:08:06 crc kubenswrapper[4852]: I1201 20:08:06.294049 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:08:06 crc kubenswrapper[4852]: I1201 20:08:06.294550 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:08:06 crc kubenswrapper[4852]: I1201 20:08:06.364763 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:08:08 crc kubenswrapper[4852]: I1201 20:08:08.675784 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" podUID="ff119331-8296-43c6-abd8-2da90ba021a6" containerName="oauth-openshift" containerID="cri-o://951cb486a4034688f180d2c2c1a28f77b64156bbd2e4851f2af7cd7809737cf1" gracePeriod=15 Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.680596 4852 generic.go:334] "Generic (PLEG): container finished" podID="ff119331-8296-43c6-abd8-2da90ba021a6" containerID="951cb486a4034688f180d2c2c1a28f77b64156bbd2e4851f2af7cd7809737cf1" exitCode=0 Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.680645 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" event={"ID":"ff119331-8296-43c6-abd8-2da90ba021a6","Type":"ContainerDied","Data":"951cb486a4034688f180d2c2c1a28f77b64156bbd2e4851f2af7cd7809737cf1"} Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.680674 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" event={"ID":"ff119331-8296-43c6-abd8-2da90ba021a6","Type":"ContainerDied","Data":"9b031c7691f14bacb7db44b38774722f2b0de48b6c3f15c2f14485756f19f5e4"} Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.680691 4852 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="9b031c7691f14bacb7db44b38774722f2b0de48b6c3f15c2f14485756f19f5e4" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.718225 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845308 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-cliconfig\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845373 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbwsw\" (UniqueName: \"kubernetes.io/projected/ff119331-8296-43c6-abd8-2da90ba021a6-kube-api-access-vbwsw\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845399 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-trusted-ca-bundle\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845422 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-audit-policies\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845468 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-service-ca\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845507 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-provider-selection\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845538 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ff119331-8296-43c6-abd8-2da90ba021a6-audit-dir\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845571 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-login\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845600 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-error\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845634 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-ocp-branding-template\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845667 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-idp-0-file-data\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845704 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-session\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845733 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-serving-cert\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845767 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-router-certs\") pod \"ff119331-8296-43c6-abd8-2da90ba021a6\" (UID: \"ff119331-8296-43c6-abd8-2da90ba021a6\") " Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.845783 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ff119331-8296-43c6-abd8-2da90ba021a6-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.846008 4852 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ff119331-8296-43c6-abd8-2da90ba021a6-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.846698 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.846708 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.847372 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.847500 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.854131 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.857779 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.858349 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.858439 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.859793 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff119331-8296-43c6-abd8-2da90ba021a6-kube-api-access-vbwsw" (OuterVolumeSpecName: "kube-api-access-vbwsw") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "kube-api-access-vbwsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.861937 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.861950 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.862155 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.863469 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "ff119331-8296-43c6-abd8-2da90ba021a6" (UID: "ff119331-8296-43c6-abd8-2da90ba021a6"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947382 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947425 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947440 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947466 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbwsw\" (UniqueName: \"kubernetes.io/projected/ff119331-8296-43c6-abd8-2da90ba021a6-kube-api-access-vbwsw\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947477 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947487 4852 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947499 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947511 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947523 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947535 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947546 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947555 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:09 crc kubenswrapper[4852]: I1201 20:08:09.947564 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ff119331-8296-43c6-abd8-2da90ba021a6-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:10 crc kubenswrapper[4852]: I1201 20:08:10.688432 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-n7xgz" Dec 01 20:08:10 crc kubenswrapper[4852]: I1201 20:08:10.721583 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-n7xgz"] Dec 01 20:08:10 crc kubenswrapper[4852]: I1201 20:08:10.728647 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-n7xgz"] Dec 01 20:08:12 crc kubenswrapper[4852]: I1201 20:08:12.331236 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff119331-8296-43c6-abd8-2da90ba021a6" path="/var/lib/kubelet/pods/ff119331-8296-43c6-abd8-2da90ba021a6/volumes" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.817381 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-8488df84f9-xq52p"] Dec 01 20:08:13 crc kubenswrapper[4852]: E1201 20:08:13.817729 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerName="registry-server" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.817752 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerName="registry-server" Dec 01 20:08:13 crc kubenswrapper[4852]: E1201 20:08:13.817775 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerName="extract-utilities" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.817787 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerName="extract-utilities" Dec 01 20:08:13 crc kubenswrapper[4852]: E1201 20:08:13.817810 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerName="extract-content" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.817825 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerName="extract-content" Dec 01 20:08:13 crc kubenswrapper[4852]: E1201 20:08:13.817848 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff119331-8296-43c6-abd8-2da90ba021a6" containerName="oauth-openshift" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.817861 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff119331-8296-43c6-abd8-2da90ba021a6" containerName="oauth-openshift" Dec 01 20:08:13 crc kubenswrapper[4852]: E1201 20:08:13.817880 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerName="extract-utilities" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.817892 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerName="extract-utilities" Dec 01 20:08:13 crc kubenswrapper[4852]: E1201 20:08:13.817913 4852 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerName="extract-content" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.817925 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerName="extract-content" Dec 01 20:08:13 crc kubenswrapper[4852]: E1201 20:08:13.817943 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerName="registry-server" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.817956 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerName="registry-server" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.818147 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30293cc-0c33-4001-9e98-82d881fb74d8" containerName="registry-server" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.818172 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bed1d58-1604-4e6f-a217-244cdc5c77e1" containerName="registry-server" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.818208 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff119331-8296-43c6-abd8-2da90ba021a6" containerName="oauth-openshift" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.818806 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.822858 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.824002 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.824433 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.824494 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.824617 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.824740 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.825009 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.825319 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.825660 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.825730 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.825975 4852 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.829720 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.837962 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.840487 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-8488df84f9-xq52p"] Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.843070 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.850706 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.911144 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.911231 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.911277 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crq6k\" (UniqueName: \"kubernetes.io/projected/8960bbfc-ef97-4fb2-88e1-50d030de34a2-kube-api-access-crq6k\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.911316 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.911343 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-login\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.911790 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.911863 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-session\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.911920 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-dir\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.911984 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.912042 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-error\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.912077 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-policies\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.912115 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.912145 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-router-certs\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:13 crc kubenswrapper[4852]: I1201 20:08:13.912204 4852 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-service-ca\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.013935 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.014027 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-session\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.014086 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-dir\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.014129 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.014164 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-error\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.014199 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-policies\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.014232 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.014228 4852 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-dir\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.014259 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-router-certs\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.016037 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-service-ca\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.016125 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.016204 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.016263 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crq6k\" (UniqueName: \"kubernetes.io/projected/8960bbfc-ef97-4fb2-88e1-50d030de34a2-kube-api-access-crq6k\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.016313 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.016364 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-login\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.017379 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.017447 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-service-ca\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.016315 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-policies\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.020144 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.022765 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.022907 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-session\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.023191 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-error\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.023196 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-login\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.023746 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-router-certs\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.025057 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.025189 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.026004 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.041736 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.047887 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crq6k\" (UniqueName: \"kubernetes.io/projected/8960bbfc-ef97-4fb2-88e1-50d030de34a2-kube-api-access-crq6k\") pod \"oauth-openshift-8488df84f9-xq52p\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.091410 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.147562 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.374167 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.395581 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-8488df84f9-xq52p"] Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.430701 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:08:14 crc kubenswrapper[4852]: I1201 20:08:14.715505 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" event={"ID":"8960bbfc-ef97-4fb2-88e1-50d030de34a2","Type":"ContainerStarted","Data":"144af989d1b1486f6f411dd22cd870db105717fa8316759e08bf72138d262ac7"} Dec 01 20:08:15 crc kubenswrapper[4852]: I1201 20:08:15.291412 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sr6w6"] Dec 01 20:08:15 crc kubenswrapper[4852]: I1201 20:08:15.726542 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" event={"ID":"8960bbfc-ef97-4fb2-88e1-50d030de34a2","Type":"ContainerStarted","Data":"e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1"} Dec 01 20:08:15 crc kubenswrapper[4852]: I1201 20:08:15.726807 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sr6w6" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerName="registry-server" containerID="cri-o://212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229" gracePeriod=2 Dec 01 20:08:15 crc kubenswrapper[4852]: I1201 20:08:15.770246 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" podStartSLOduration=32.770215445 podStartE2EDuration="32.770215445s" podCreationTimestamp="2025-12-01 20:07:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:08:15.765207323 +0000 UTC m=+215.692288740" watchObservedRunningTime="2025-12-01 20:08:15.770215445 +0000 UTC m=+215.697296872" Dec 01 20:08:15 crc kubenswrapper[4852]: I1201 20:08:15.941913 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.150364 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.254711 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwxm4\" (UniqueName: \"kubernetes.io/projected/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-kube-api-access-kwxm4\") pod \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.254855 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-utilities\") pod \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.254963 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-catalog-content\") pod \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\" (UID: \"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8\") " Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.256109 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-utilities" (OuterVolumeSpecName: "utilities") pod "7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" (UID: "7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.262067 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-kube-api-access-kwxm4" (OuterVolumeSpecName: "kube-api-access-kwxm4") pod "7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" (UID: "7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8"). InnerVolumeSpecName "kube-api-access-kwxm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.311717 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" (UID: "7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.339260 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.356831 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwxm4\" (UniqueName: \"kubernetes.io/projected/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-kube-api-access-kwxm4\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.356901 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.356927 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.740912 4852 generic.go:334] "Generic (PLEG): container finished" podID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerID="212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229" exitCode=0 Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.741009 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sr6w6" event={"ID":"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8","Type":"ContainerDied","Data":"212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229"} Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.741056 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sr6w6" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.741096 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sr6w6" event={"ID":"7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8","Type":"ContainerDied","Data":"8e2c5db96a1f593aeafb5d71e81a4991877bcf949bbf2254d9d11caf7da65d79"} Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.741146 4852 scope.go:117] "RemoveContainer" containerID="212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.742795 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.751920 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.774681 4852 scope.go:117] "RemoveContainer" containerID="b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.776758 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sr6w6"] Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.782521 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sr6w6"] Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.803287 4852 scope.go:117] "RemoveContainer" containerID="b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.845611 4852 scope.go:117] "RemoveContainer" 
containerID="212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229" Dec 01 20:08:16 crc kubenswrapper[4852]: E1201 20:08:16.850180 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229\": container with ID starting with 212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229 not found: ID does not exist" containerID="212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.850248 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229"} err="failed to get container status \"212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229\": rpc error: code = NotFound desc = could not find container \"212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229\": container with ID starting with 212f812f32c93de3b526fe8c7579f1e4189a77c61cfdd46412cad7e60b514229 not found: ID does not exist" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.850341 4852 scope.go:117] "RemoveContainer" containerID="b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951" Dec 01 20:08:16 crc kubenswrapper[4852]: E1201 20:08:16.850973 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951\": container with ID starting with b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951 not found: ID does not exist" containerID="b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.851016 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951"} err="failed to get container status \"b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951\": rpc error: code = NotFound desc = could not find container \"b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951\": container with ID starting with b0ee4a2df0e0fc49526a01dc1c71c22fa07ba37c5dc3d33a21eb68ab7dc89951 not found: ID does not exist" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.851043 4852 scope.go:117] "RemoveContainer" containerID="b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f" Dec 01 20:08:16 crc kubenswrapper[4852]: E1201 20:08:16.851773 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f\": container with ID starting with b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f not found: ID does not exist" containerID="b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f" Dec 01 20:08:16 crc kubenswrapper[4852]: I1201 20:08:16.851815 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f"} err="failed to get container status \"b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f\": rpc error: code = NotFound desc = could not find container \"b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f\": container with ID starting with 
b22657556c3b212f588fbefa3cfbfb40f3cc2d22de19e89155fa0b88921ccb4f not found: ID does not exist" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.285687 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sl8rl"] Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.286349 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sl8rl" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerName="registry-server" containerID="cri-o://c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891" gracePeriod=2 Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.328844 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" path="/var/lib/kubelet/pods/7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8/volumes" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.668824 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.759430 4852 generic.go:334] "Generic (PLEG): container finished" podID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerID="c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891" exitCode=0 Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.759505 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sl8rl" event={"ID":"f60b7d08-82e1-4c52-a49b-81ef0fd12bff","Type":"ContainerDied","Data":"c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891"} Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.759584 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sl8rl" event={"ID":"f60b7d08-82e1-4c52-a49b-81ef0fd12bff","Type":"ContainerDied","Data":"0046c8693d2a339320581d9fed8e7a110da163495ef2e928520368f64a3be99b"} Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.759612 4852 scope.go:117] "RemoveContainer" containerID="c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.759637 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sl8rl" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.776132 4852 scope.go:117] "RemoveContainer" containerID="4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.791928 4852 scope.go:117] "RemoveContainer" containerID="a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.814119 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpwzh\" (UniqueName: \"kubernetes.io/projected/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-kube-api-access-cpwzh\") pod \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.814327 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-catalog-content\") pod \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.814382 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-utilities\") pod \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\" (UID: \"f60b7d08-82e1-4c52-a49b-81ef0fd12bff\") " Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.817817 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-utilities" (OuterVolumeSpecName: "utilities") pod "f60b7d08-82e1-4c52-a49b-81ef0fd12bff" (UID: "f60b7d08-82e1-4c52-a49b-81ef0fd12bff"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.820668 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-kube-api-access-cpwzh" (OuterVolumeSpecName: "kube-api-access-cpwzh") pod "f60b7d08-82e1-4c52-a49b-81ef0fd12bff" (UID: "f60b7d08-82e1-4c52-a49b-81ef0fd12bff"). InnerVolumeSpecName "kube-api-access-cpwzh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.823256 4852 scope.go:117] "RemoveContainer" containerID="c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891" Dec 01 20:08:18 crc kubenswrapper[4852]: E1201 20:08:18.823727 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891\": container with ID starting with c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891 not found: ID does not exist" containerID="c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.823772 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891"} err="failed to get container status \"c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891\": rpc error: code = NotFound desc = could not find container \"c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891\": container with ID starting with c5cf4067d4b84bc8983724944f7347d51eecc50d0bbcaa3b4035abda731e5891 not found: ID does not exist" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.823804 4852 scope.go:117] "RemoveContainer" containerID="4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec" Dec 01 20:08:18 crc kubenswrapper[4852]: E1201 20:08:18.824534 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec\": container with ID starting with 4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec not found: ID does not exist" containerID="4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.824590 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec"} err="failed to get container status \"4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec\": rpc error: code = NotFound desc = could not find container \"4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec\": container with ID starting with 4b1008e66f765710b9f6ebcdf3e4838ac21233a0b376e56f5a41d554ec8f02ec not found: ID does not exist" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.824610 4852 scope.go:117] "RemoveContainer" containerID="a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005" Dec 01 20:08:18 crc kubenswrapper[4852]: E1201 20:08:18.825599 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005\": container with ID starting with a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005 not found: ID does not exist" containerID="a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.825659 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005"} err="failed to get container status \"a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005\": rpc error: code = NotFound desc = could not 
find container \"a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005\": container with ID starting with a0526c0e781f16572be6ad2d0e08c3756d039db861f3470f2e1ef0c9870a5005 not found: ID does not exist" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.836001 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f60b7d08-82e1-4c52-a49b-81ef0fd12bff" (UID: "f60b7d08-82e1-4c52-a49b-81ef0fd12bff"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.916895 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.917879 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:18 crc kubenswrapper[4852]: I1201 20:08:18.918085 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpwzh\" (UniqueName: \"kubernetes.io/projected/f60b7d08-82e1-4c52-a49b-81ef0fd12bff-kube-api-access-cpwzh\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:19 crc kubenswrapper[4852]: I1201 20:08:19.090681 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sl8rl"] Dec 01 20:08:19 crc kubenswrapper[4852]: I1201 20:08:19.094122 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sl8rl"] Dec 01 20:08:20 crc kubenswrapper[4852]: I1201 20:08:20.229781 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:08:20 crc kubenswrapper[4852]: I1201 20:08:20.230581 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:08:20 crc kubenswrapper[4852]: I1201 20:08:20.230669 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:08:20 crc kubenswrapper[4852]: I1201 20:08:20.231674 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:08:20 crc kubenswrapper[4852]: I1201 20:08:20.231782 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" 
containerID="cri-o://9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012" gracePeriod=600 Dec 01 20:08:20 crc kubenswrapper[4852]: I1201 20:08:20.334230 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" path="/var/lib/kubelet/pods/f60b7d08-82e1-4c52-a49b-81ef0fd12bff/volumes" Dec 01 20:08:20 crc kubenswrapper[4852]: I1201 20:08:20.778306 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012" exitCode=0 Dec 01 20:08:20 crc kubenswrapper[4852]: I1201 20:08:20.778557 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012"} Dec 01 20:08:20 crc kubenswrapper[4852]: I1201 20:08:20.779067 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"0d510a15a2e3605f45257253e50b41f7564e0004cc53df07be037e165d3fa731"} Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.481547 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sxthp"] Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.484007 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sxthp" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerName="registry-server" containerID="cri-o://f653dcfe30a3457db1657ab3866adc036598a6649aa96007c674135d802f15ab" gracePeriod=30 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.496622 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4vvkx"] Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.497491 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4vvkx" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerName="registry-server" containerID="cri-o://6a9e849c71a1551776da545d971828103075896b765751cc6254ec6cbe5e3799" gracePeriod=30 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.507793 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zfljp"] Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.508204 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" containerName="marketplace-operator" containerID="cri-o://71cc014ede15ec474bb10d125f5e910ca394881227fd16e7962447850c7d443d" gracePeriod=30 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.517945 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f5255"] Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.518712 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-f5255" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerName="registry-server" containerID="cri-o://59ee59a57f543518007089b4cccadf68e38d464be8ceb142f132610404d5fc02" gracePeriod=30 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.540648 4852 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dwvx7"] Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.541209 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dwvx7" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" containerName="registry-server" containerID="cri-o://9c8a582c46ade620c64b428895e727abe7c17878049b24a52bfda4aaab4c0772" gracePeriod=30 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.563935 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vbxrx"] Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.564498 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerName="extract-utilities" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.564523 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerName="extract-utilities" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.564539 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerName="extract-content" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.564548 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerName="extract-content" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.564558 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerName="registry-server" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.564568 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerName="registry-server" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.564595 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerName="registry-server" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.564605 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerName="registry-server" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.564616 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerName="extract-content" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.564624 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerName="extract-content" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.564650 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerName="extract-utilities" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.564659 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerName="extract-utilities" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.564907 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f60b7d08-82e1-4c52-a49b-81ef0fd12bff" containerName="registry-server" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.564929 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e84ebd6-e0f7-4b3b-851c-0fe7f8f77fd8" containerName="registry-server" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.565935 4852 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.567535 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vbxrx"] Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.659895 4852 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10a3a8e7_980e_4015_9418_f0854d431b85.slice/crio-9c8a582c46ade620c64b428895e727abe7c17878049b24a52bfda4aaab4c0772.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb095312_fe29_458a_b5d5_8fd607a89e8b.slice/crio-6a9e849c71a1551776da545d971828103075896b765751cc6254ec6cbe5e3799.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea3aaa39_f0ff_4aaf_858f_8b9824e35d0e.slice/crio-f653dcfe30a3457db1657ab3866adc036598a6649aa96007c674135d802f15ab.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9e7ffee_9028_45cb_83bd_3d5a0019ab16.slice/crio-59ee59a57f543518007089b4cccadf68e38d464be8ceb142f132610404d5fc02.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65c26ddd_9a26_4b9c_b3fa_74827d33872a.slice/crio-71cc014ede15ec474bb10d125f5e910ca394881227fd16e7962447850c7d443d.scope\": RecentStats: unable to find data in memory cache]" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.702636 4852 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.703641 4852 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.703901 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704075 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38" gracePeriod=15 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704153 4852 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704202 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a" gracePeriod=15 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704223 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1" gracePeriod=15 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704288 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5" gracePeriod=15 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704116 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d" gracePeriod=15 Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.704443 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704558 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.704588 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704597 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.704626 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704637 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.704654 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 20:08:32 
crc kubenswrapper[4852]: I1201 20:08:32.704662 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.704675 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704684 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.704696 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704703 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.704715 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704724 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704898 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704918 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704928 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704937 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704949 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.704964 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.710069 4852 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.739823 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/674a3020-de73-41ff-b140-3ab2bc9d11aa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.739928 
4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/674a3020-de73-41ff-b140-3ab2bc9d11aa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.739974 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q5bf\" (UniqueName: \"kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.842384 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.842465 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/674a3020-de73-41ff-b140-3ab2bc9d11aa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.842507 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q5bf\" (UniqueName: \"kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.842554 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.842588 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.842604 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.842635 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.842674 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/674a3020-de73-41ff-b140-3ab2bc9d11aa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.842724 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.842750 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.843118 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.843559 4852 projected.go:194] Error preparing data for projected volume kube-api-access-8q5bf for pod openshift-marketplace/marketplace-operator-79b997595-vbxrx: failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.843617 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf podName:674a3020-de73-41ff-b140-3ab2bc9d11aa nodeName:}" failed. No retries permitted until 2025-12-01 20:08:33.343596718 +0000 UTC m=+233.270678135 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-8q5bf" (UniqueName: "kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf") pod "marketplace-operator-79b997595-vbxrx" (UID: "674a3020-de73-41ff-b140-3ab2bc9d11aa") : failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.844021 4852 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.219:6443: connect: connection refused" event="&Event{ObjectMeta:{marketplace-operator-79b997595-vbxrx.187d30478bb1a928 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-vbxrx,UID:674a3020-de73-41ff-b140-3ab2bc9d11aa,APIVersion:v1,ResourceVersion:29356,FieldPath:,},Reason:FailedMount,Message:MountVolume.SetUp failed for volume \"kube-api-access-8q5bf\" : failed to fetch token: Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token\": dial tcp 38.102.83.219:6443: connect: connection refused,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 20:08:32.843589928 +0000 UTC m=+232.770671345,LastTimestamp:2025-12-01 20:08:32.843589928 +0000 UTC m=+232.770671345,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.845748 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/674a3020-de73-41ff-b140-3ab2bc9d11aa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.851310 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/674a3020-de73-41ff-b140-3ab2bc9d11aa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.861082 4852 generic.go:334] "Generic (PLEG): container finished" podID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerID="59ee59a57f543518007089b4cccadf68e38d464be8ceb142f132610404d5fc02" exitCode=0 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.861141 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5255" event={"ID":"f9e7ffee-9028-45cb-83bd-3d5a0019ab16","Type":"ContainerDied","Data":"59ee59a57f543518007089b4cccadf68e38d464be8ceb142f132610404d5fc02"} Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.864440 4852 generic.go:334] "Generic (PLEG): container finished" podID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerID="f653dcfe30a3457db1657ab3866adc036598a6649aa96007c674135d802f15ab" exitCode=0 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.864525 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-sxthp" event={"ID":"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e","Type":"ContainerDied","Data":"f653dcfe30a3457db1657ab3866adc036598a6649aa96007c674135d802f15ab"} Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.867122 4852 generic.go:334] "Generic (PLEG): container finished" podID="10a3a8e7-980e-4015-9418-f0854d431b85" containerID="9c8a582c46ade620c64b428895e727abe7c17878049b24a52bfda4aaab4c0772" exitCode=0 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.867251 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dwvx7" event={"ID":"10a3a8e7-980e-4015-9418-f0854d431b85","Type":"ContainerDied","Data":"9c8a582c46ade620c64b428895e727abe7c17878049b24a52bfda4aaab4c0772"} Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.870253 4852 generic.go:334] "Generic (PLEG): container finished" podID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" containerID="71cc014ede15ec474bb10d125f5e910ca394881227fd16e7962447850c7d443d" exitCode=0 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.870294 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" event={"ID":"65c26ddd-9a26-4b9c-b3fa-74827d33872a","Type":"ContainerDied","Data":"71cc014ede15ec474bb10d125f5e910ca394881227fd16e7962447850c7d443d"} Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.873182 4852 generic.go:334] "Generic (PLEG): container finished" podID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerID="6a9e849c71a1551776da545d971828103075896b765751cc6254ec6cbe5e3799" exitCode=0 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.873243 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4vvkx" event={"ID":"bb095312-fe29-458a-b5d5-8fd607a89e8b","Type":"ContainerDied","Data":"6a9e849c71a1551776da545d971828103075896b765751cc6254ec6cbe5e3799"} Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.876182 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.878171 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.878988 4852 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d" exitCode=0 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.879020 4852 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5" exitCode=0 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.879033 4852 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1" exitCode=0 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.879043 4852 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a" exitCode=2 Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.879087 4852 scope.go:117] "RemoveContainer" 
containerID="0007f6efeac428c6ef3087ba5663b798d25b7d8090c7083f3243407ecf7b1995" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944009 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944054 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944077 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944106 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944126 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944140 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944151 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944191 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944204 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944219 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944175 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944254 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944261 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944220 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944291 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.944348 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: E1201 20:08:32.987708 4852 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.219:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.988500 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.989528 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.990174 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.995821 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.996493 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:32 crc kubenswrapper[4852]: I1201 20:08:32.996920 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.001195 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.001636 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.002215 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.002560 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: W1201 20:08:33.012791 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-2f8a7e53347afe3c886ba2028e1f27c7d87b899e95adb590af883789e46ac2d0 WatchSource:0}: Error finding container 2f8a7e53347afe3c886ba2028e1f27c7d87b899e95adb590af883789e46ac2d0: Status 404 returned error can't find the container with id 2f8a7e53347afe3c886ba2028e1f27c7d87b899e95adb590af883789e46ac2d0 Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.147084 4852 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glmlr\" (UniqueName: \"kubernetes.io/projected/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-kube-api-access-glmlr\") pod \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.147787 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqzn7\" (UniqueName: \"kubernetes.io/projected/10a3a8e7-980e-4015-9418-f0854d431b85-kube-api-access-cqzn7\") pod \"10a3a8e7-980e-4015-9418-f0854d431b85\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.148141 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlrpk\" (UniqueName: \"kubernetes.io/projected/65c26ddd-9a26-4b9c-b3fa-74827d33872a-kube-api-access-rlrpk\") pod \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.148547 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-catalog-content\") pod \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.148749 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-trusted-ca\") pod \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.148922 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-utilities\") pod \"10a3a8e7-980e-4015-9418-f0854d431b85\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.149016 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-operator-metrics\") pod \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\" (UID: \"65c26ddd-9a26-4b9c-b3fa-74827d33872a\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.149139 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-catalog-content\") pod \"10a3a8e7-980e-4015-9418-f0854d431b85\" (UID: \"10a3a8e7-980e-4015-9418-f0854d431b85\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.149775 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-utilities" (OuterVolumeSpecName: "utilities") pod "10a3a8e7-980e-4015-9418-f0854d431b85" (UID: "10a3a8e7-980e-4015-9418-f0854d431b85"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.149898 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "65c26ddd-9a26-4b9c-b3fa-74827d33872a" (UID: "65c26ddd-9a26-4b9c-b3fa-74827d33872a"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.150071 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-utilities\") pod \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\" (UID: \"f9e7ffee-9028-45cb-83bd-3d5a0019ab16\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.150778 4852 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.150904 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.150846 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-utilities" (OuterVolumeSpecName: "utilities") pod "f9e7ffee-9028-45cb-83bd-3d5a0019ab16" (UID: "f9e7ffee-9028-45cb-83bd-3d5a0019ab16"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.151117 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10a3a8e7-980e-4015-9418-f0854d431b85-kube-api-access-cqzn7" (OuterVolumeSpecName: "kube-api-access-cqzn7") pod "10a3a8e7-980e-4015-9418-f0854d431b85" (UID: "10a3a8e7-980e-4015-9418-f0854d431b85"). InnerVolumeSpecName "kube-api-access-cqzn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.155529 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-kube-api-access-glmlr" (OuterVolumeSpecName: "kube-api-access-glmlr") pod "f9e7ffee-9028-45cb-83bd-3d5a0019ab16" (UID: "f9e7ffee-9028-45cb-83bd-3d5a0019ab16"). InnerVolumeSpecName "kube-api-access-glmlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.155772 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "65c26ddd-9a26-4b9c-b3fa-74827d33872a" (UID: "65c26ddd-9a26-4b9c-b3fa-74827d33872a"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.158299 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65c26ddd-9a26-4b9c-b3fa-74827d33872a-kube-api-access-rlrpk" (OuterVolumeSpecName: "kube-api-access-rlrpk") pod "65c26ddd-9a26-4b9c-b3fa-74827d33872a" (UID: "65c26ddd-9a26-4b9c-b3fa-74827d33872a"). InnerVolumeSpecName "kube-api-access-rlrpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.168689 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9e7ffee-9028-45cb-83bd-3d5a0019ab16" (UID: "f9e7ffee-9028-45cb-83bd-3d5a0019ab16"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.252621 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.252653 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glmlr\" (UniqueName: \"kubernetes.io/projected/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-kube-api-access-glmlr\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.252662 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqzn7\" (UniqueName: \"kubernetes.io/projected/10a3a8e7-980e-4015-9418-f0854d431b85-kube-api-access-cqzn7\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.252673 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlrpk\" (UniqueName: \"kubernetes.io/projected/65c26ddd-9a26-4b9c-b3fa-74827d33872a-kube-api-access-rlrpk\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.252684 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9e7ffee-9028-45cb-83bd-3d5a0019ab16-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.252693 4852 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/65c26ddd-9a26-4b9c-b3fa-74827d33872a-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.266515 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "10a3a8e7-980e-4015-9418-f0854d431b85" (UID: "10a3a8e7-980e-4015-9418-f0854d431b85"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.353769 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q5bf\" (UniqueName: \"kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.353867 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10a3a8e7-980e-4015-9418-f0854d431b85-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: E1201 20:08:33.354815 4852 projected.go:194] Error preparing data for projected volume kube-api-access-8q5bf for pod openshift-marketplace/marketplace-operator-79b997595-vbxrx: failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:08:33 crc kubenswrapper[4852]: E1201 20:08:33.354941 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf podName:674a3020-de73-41ff-b140-3ab2bc9d11aa nodeName:}" failed. No retries permitted until 2025-12-01 20:08:34.354915116 +0000 UTC m=+234.281996613 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-8q5bf" (UniqueName: "kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf") pod "marketplace-operator-79b997595-vbxrx" (UID: "674a3020-de73-41ff-b140-3ab2bc9d11aa") : failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:08:33 crc kubenswrapper[4852]: E1201 20:08:33.468935 4852 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: E1201 20:08:33.469424 4852 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: E1201 20:08:33.469650 4852 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: E1201 20:08:33.469831 4852 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: E1201 20:08:33.470035 4852 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 
20:08:33.470068 4852 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 01 20:08:33 crc kubenswrapper[4852]: E1201 20:08:33.470249 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="200ms" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.510759 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.511439 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.511813 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.512303 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.512736 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.573038 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.573730 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.574326 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.574872 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.575162 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.575471 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.658087 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vztx\" (UniqueName: \"kubernetes.io/projected/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-kube-api-access-8vztx\") pod \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.658201 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-catalog-content\") pod \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.658247 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-utilities\") pod \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\" (UID: \"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.659416 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-utilities" (OuterVolumeSpecName: "utilities") pod "ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" (UID: "ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.664864 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-kube-api-access-8vztx" (OuterVolumeSpecName: "kube-api-access-8vztx") pod "ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" (UID: "ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e"). InnerVolumeSpecName "kube-api-access-8vztx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: E1201 20:08:33.671196 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="400ms" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.717097 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" (UID: "ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.759312 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-utilities\") pod \"bb095312-fe29-458a-b5d5-8fd607a89e8b\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.759470 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8tnt\" (UniqueName: \"kubernetes.io/projected/bb095312-fe29-458a-b5d5-8fd607a89e8b-kube-api-access-x8tnt\") pod \"bb095312-fe29-458a-b5d5-8fd607a89e8b\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.759509 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-catalog-content\") pod \"bb095312-fe29-458a-b5d5-8fd607a89e8b\" (UID: \"bb095312-fe29-458a-b5d5-8fd607a89e8b\") " Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.759737 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vztx\" (UniqueName: \"kubernetes.io/projected/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-kube-api-access-8vztx\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.759749 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.759758 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.760025 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-utilities" (OuterVolumeSpecName: "utilities") pod "bb095312-fe29-458a-b5d5-8fd607a89e8b" (UID: "bb095312-fe29-458a-b5d5-8fd607a89e8b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.762934 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb095312-fe29-458a-b5d5-8fd607a89e8b-kube-api-access-x8tnt" (OuterVolumeSpecName: "kube-api-access-x8tnt") pod "bb095312-fe29-458a-b5d5-8fd607a89e8b" (UID: "bb095312-fe29-458a-b5d5-8fd607a89e8b"). InnerVolumeSpecName "kube-api-access-x8tnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.812866 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb095312-fe29-458a-b5d5-8fd607a89e8b" (UID: "bb095312-fe29-458a-b5d5-8fd607a89e8b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.861758 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8tnt\" (UniqueName: \"kubernetes.io/projected/bb095312-fe29-458a-b5d5-8fd607a89e8b-kube-api-access-x8tnt\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.861810 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.861820 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb095312-fe29-458a-b5d5-8fd607a89e8b-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.889070 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" event={"ID":"65c26ddd-9a26-4b9c-b3fa-74827d33872a","Type":"ContainerDied","Data":"c41583862bbb77359b155d266200f6ec2c849492a0000dced631240c80f08f75"} Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.889086 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.889152 4852 scope.go:117] "RemoveContainer" containerID="71cc014ede15ec474bb10d125f5e910ca394881227fd16e7962447850c7d443d" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.890557 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.891311 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.891996 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.892665 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.892870 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dwvx7" event={"ID":"10a3a8e7-980e-4015-9418-f0854d431b85","Type":"ContainerDied","Data":"ebb2b64352783fb7263744185a790ef65a6530bf758493197e8ec831180c1039"} Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.892945 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dwvx7" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.893579 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.894282 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.894741 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.894949 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.895039 4852 generic.go:334] "Generic (PLEG): container finished" podID="eb1af6df-557b-4088-8ea4-6f1940ddd885" containerID="64e1052e537a1aceaf673534962df52d48e33aa8e6a775aeabf65951169081cc" exitCode=0 Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.895108 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"eb1af6df-557b-4088-8ea4-6f1940ddd885","Type":"ContainerDied","Data":"64e1052e537a1aceaf673534962df52d48e33aa8e6a775aeabf65951169081cc"} Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.895202 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.895439 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.896665 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.897132 4852 status_manager.go:851] "Failed to get status for pod" 
podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.897518 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.897810 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.898228 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.898715 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.899058 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4vvkx" event={"ID":"bb095312-fe29-458a-b5d5-8fd607a89e8b","Type":"ContainerDied","Data":"9f8f488bfdf1d755934c3cda0b248738731f1fc497a154ec76a5bdcc69fbe838"} Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.899101 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4vvkx" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.899976 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.900612 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.901195 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.901613 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.902098 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.902539 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.904421 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.909051 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351"} Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.909110 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"2f8a7e53347afe3c886ba2028e1f27c7d87b899e95adb590af883789e46ac2d0"} Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.910041 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" 
pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: E1201 20:08:33.910164 4852 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.219:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.910320 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.910595 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.910787 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.911025 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.911235 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.911756 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.912172 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f5255" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.912250 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5255" event={"ID":"f9e7ffee-9028-45cb-83bd-3d5a0019ab16","Type":"ContainerDied","Data":"f29b32314b5b527d8de893f709f403cf7d01c17c398baaef117491aadc1a0df6"} Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.912833 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.914303 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.916145 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.916636 4852 scope.go:117] "RemoveContainer" containerID="9c8a582c46ade620c64b428895e727abe7c17878049b24a52bfda4aaab4c0772" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.917051 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxthp" event={"ID":"ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e","Type":"ContainerDied","Data":"65af9180016367e74ea0b0f617241d903076b64a16467a74733d1255cd091a58"} Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.917169 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sxthp" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.917414 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.918162 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.918795 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.919476 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.920207 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.921594 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.921782 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.921937 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.930206 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.931171 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.931707 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.932209 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.932988 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.933383 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.933699 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.933909 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.934370 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.934831 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.935321 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.935877 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.942117 4852 scope.go:117] "RemoveContainer" containerID="c5a91396e66eeacac4df8acca894570eb56a767e903d28fe192e4812c2b5b1dc" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.969355 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.970157 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.970547 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.970729 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.970908 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.971086 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection 
refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.971350 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.971576 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.971760 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.971963 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.972124 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.972295 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:33 crc kubenswrapper[4852]: I1201 20:08:33.994615 4852 scope.go:117] "RemoveContainer" containerID="a2841a2ed43bf7ba08988da1cd541ad8da84c85e890c0b169c3198465ba300c4" Dec 01 20:08:34 crc kubenswrapper[4852]: I1201 20:08:34.011610 4852 scope.go:117] "RemoveContainer" containerID="6a9e849c71a1551776da545d971828103075896b765751cc6254ec6cbe5e3799" Dec 01 20:08:34 crc kubenswrapper[4852]: I1201 20:08:34.027540 4852 scope.go:117] "RemoveContainer" containerID="30f9864541ab917d68fc183052c354e69ff3e0a1b2dd69c7a8704ab9c701de5d" Dec 01 20:08:34 crc kubenswrapper[4852]: I1201 20:08:34.044752 4852 scope.go:117] "RemoveContainer" containerID="2ad9a2ff2103f27ace8ccbbc55f076bded99b65f5330ba746b2d2e17e1a3962a" Dec 01 20:08:34 crc kubenswrapper[4852]: I1201 20:08:34.058176 4852 scope.go:117] "RemoveContainer" containerID="59ee59a57f543518007089b4cccadf68e38d464be8ceb142f132610404d5fc02" Dec 01 20:08:34 crc kubenswrapper[4852]: I1201 20:08:34.071704 4852 scope.go:117] "RemoveContainer" containerID="8d304bfd3e07a2ab81242de286f6fb9bb52ed095649a9aff9821cfeda584f561" Dec 01 20:08:34 crc kubenswrapper[4852]: E1201 20:08:34.072152 4852 
controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="800ms" Dec 01 20:08:34 crc kubenswrapper[4852]: I1201 20:08:34.088350 4852 scope.go:117] "RemoveContainer" containerID="8400a975fd7fc38e06882d1d94603a2c7e078c44a7af7573ae36e052b08eb3e0" Dec 01 20:08:34 crc kubenswrapper[4852]: I1201 20:08:34.105250 4852 scope.go:117] "RemoveContainer" containerID="f653dcfe30a3457db1657ab3866adc036598a6649aa96007c674135d802f15ab" Dec 01 20:08:34 crc kubenswrapper[4852]: I1201 20:08:34.121879 4852 scope.go:117] "RemoveContainer" containerID="33a52364302e089cb24ba73405b3dad0f5ea7d312fd6a559056ae64de1f3a949" Dec 01 20:08:34 crc kubenswrapper[4852]: I1201 20:08:34.143254 4852 scope.go:117] "RemoveContainer" containerID="8e1cc78c549adb6205db3f36f9de3850f886a8d6482c68abfb05d59c7abf802c" Dec 01 20:08:34 crc kubenswrapper[4852]: I1201 20:08:34.375383 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q5bf\" (UniqueName: \"kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:34 crc kubenswrapper[4852]: E1201 20:08:34.376308 4852 projected.go:194] Error preparing data for projected volume kube-api-access-8q5bf for pod openshift-marketplace/marketplace-operator-79b997595-vbxrx: failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:08:34 crc kubenswrapper[4852]: E1201 20:08:34.376410 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf podName:674a3020-de73-41ff-b140-3ab2bc9d11aa nodeName:}" failed. No retries permitted until 2025-12-01 20:08:36.376383233 +0000 UTC m=+236.303464660 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-8q5bf" (UniqueName: "kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf") pod "marketplace-operator-79b997595-vbxrx" (UID: "674a3020-de73-41ff-b140-3ab2bc9d11aa") : failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:08:34 crc kubenswrapper[4852]: E1201 20:08:34.874150 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="1.6s" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.093693 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.095268 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.096225 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.096948 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.097655 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.097961 4852 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.098404 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.099279 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.100432 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.191568 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.191718 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.191756 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.191843 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.191900 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.191984 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.192224 4852 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.192253 4852 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.192264 4852 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.202111 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.203087 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.203564 4852 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.204184 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.204549 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.204863 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.205200 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.205652 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.293608 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-kubelet-dir\") pod \"eb1af6df-557b-4088-8ea4-6f1940ddd885\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.293672 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-var-lock\") pod \"eb1af6df-557b-4088-8ea4-6f1940ddd885\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " Dec 01 20:08:35 
crc kubenswrapper[4852]: I1201 20:08:35.293729 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eb1af6df-557b-4088-8ea4-6f1940ddd885-kube-api-access\") pod \"eb1af6df-557b-4088-8ea4-6f1940ddd885\" (UID: \"eb1af6df-557b-4088-8ea4-6f1940ddd885\") " Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.293838 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-var-lock" (OuterVolumeSpecName: "var-lock") pod "eb1af6df-557b-4088-8ea4-6f1940ddd885" (UID: "eb1af6df-557b-4088-8ea4-6f1940ddd885"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.293902 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "eb1af6df-557b-4088-8ea4-6f1940ddd885" (UID: "eb1af6df-557b-4088-8ea4-6f1940ddd885"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.294153 4852 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.294179 4852 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/eb1af6df-557b-4088-8ea4-6f1940ddd885-var-lock\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.300514 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb1af6df-557b-4088-8ea4-6f1940ddd885-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "eb1af6df-557b-4088-8ea4-6f1940ddd885" (UID: "eb1af6df-557b-4088-8ea4-6f1940ddd885"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.395665 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eb1af6df-557b-4088-8ea4-6f1940ddd885-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:35 crc kubenswrapper[4852]: E1201 20:08:35.828177 4852 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.219:6443: connect: connection refused" event="&Event{ObjectMeta:{marketplace-operator-79b997595-vbxrx.187d30478bb1a928 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-vbxrx,UID:674a3020-de73-41ff-b140-3ab2bc9d11aa,APIVersion:v1,ResourceVersion:29356,FieldPath:,},Reason:FailedMount,Message:MountVolume.SetUp failed for volume \"kube-api-access-8q5bf\" : failed to fetch token: Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token\": dial tcp 38.102.83.219:6443: connect: connection refused,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 20:08:32.843589928 +0000 UTC m=+232.770671345,LastTimestamp:2025-12-01 20:08:32.843589928 +0000 UTC m=+232.770671345,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.942653 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.943649 4852 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38" exitCode=0 Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.943752 4852 scope.go:117] "RemoveContainer" containerID="f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.943784 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.945996 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"eb1af6df-557b-4088-8ea4-6f1940ddd885","Type":"ContainerDied","Data":"c1106c221fbfea3d41c35685fa944061e53f4fe9a3b81032f136ca90bca9fb26"} Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.946033 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1106c221fbfea3d41c35685fa944061e53f4fe9a3b81032f136ca90bca9fb26" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.946106 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.963705 4852 scope.go:117] "RemoveContainer" containerID="24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.965858 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.966702 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.967166 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.967605 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.968167 4852 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.968528 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.968885 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.969554 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.970101 4852 status_manager.go:851] "Failed to get status for pod" 
podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.970695 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.971182 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.971804 4852 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.972728 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.973188 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.983105 4852 scope.go:117] "RemoveContainer" containerID="f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1" Dec 01 20:08:35 crc kubenswrapper[4852]: I1201 20:08:35.997368 4852 scope.go:117] "RemoveContainer" containerID="204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.013095 4852 scope.go:117] "RemoveContainer" containerID="143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.031579 4852 scope.go:117] "RemoveContainer" containerID="5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.058580 4852 scope.go:117] "RemoveContainer" containerID="f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d" Dec 01 20:08:36 crc kubenswrapper[4852]: E1201 20:08:36.059219 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\": container with ID starting with f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d not found: ID does not exist" 
containerID="f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.059264 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d"} err="failed to get container status \"f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\": rpc error: code = NotFound desc = could not find container \"f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d\": container with ID starting with f659d870c9cadfce584a2a41d8034eb9f57296e6807c784acb8b9667ea90872d not found: ID does not exist" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.059299 4852 scope.go:117] "RemoveContainer" containerID="24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5" Dec 01 20:08:36 crc kubenswrapper[4852]: E1201 20:08:36.060133 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\": container with ID starting with 24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5 not found: ID does not exist" containerID="24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.060170 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5"} err="failed to get container status \"24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\": rpc error: code = NotFound desc = could not find container \"24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5\": container with ID starting with 24f8283603e85db7b1e3eb827264512051e29521f6af37d590f3e6c8ed3051b5 not found: ID does not exist" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.060198 4852 scope.go:117] "RemoveContainer" containerID="f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1" Dec 01 20:08:36 crc kubenswrapper[4852]: E1201 20:08:36.060803 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\": container with ID starting with f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1 not found: ID does not exist" containerID="f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.060829 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1"} err="failed to get container status \"f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\": rpc error: code = NotFound desc = could not find container \"f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1\": container with ID starting with f07482c25cc0bf383b0d654bde84868ea77b0a22f79c6eaba848b19c62a8a9e1 not found: ID does not exist" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.060844 4852 scope.go:117] "RemoveContainer" containerID="204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a" Dec 01 20:08:36 crc kubenswrapper[4852]: E1201 20:08:36.061228 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\": container with ID starting with 204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a not found: ID does not exist" containerID="204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.061293 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a"} err="failed to get container status \"204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\": rpc error: code = NotFound desc = could not find container \"204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a\": container with ID starting with 204caed6ec892488fe3edacb61bf67d1e0b46dd04bbf27698adda2dbb8be3c9a not found: ID does not exist" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.061337 4852 scope.go:117] "RemoveContainer" containerID="143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38" Dec 01 20:08:36 crc kubenswrapper[4852]: E1201 20:08:36.061709 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\": container with ID starting with 143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38 not found: ID does not exist" containerID="143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.061763 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38"} err="failed to get container status \"143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\": rpc error: code = NotFound desc = could not find container \"143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38\": container with ID starting with 143be77b90de3b2d778421dab4d63610ac656533a82b154799ad80dcdc45fd38 not found: ID does not exist" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.061782 4852 scope.go:117] "RemoveContainer" containerID="5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0" Dec 01 20:08:36 crc kubenswrapper[4852]: E1201 20:08:36.062149 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\": container with ID starting with 5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0 not found: ID does not exist" containerID="5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.062185 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0"} err="failed to get container status \"5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\": rpc error: code = NotFound desc = could not find container \"5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0\": container with ID starting with 5ed24559695176abae24d3f341d04beb41fd05e34d088b2e1ee52c7b134eefb0 not found: ID does not exist" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.336107 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" 
path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 01 20:08:36 crc kubenswrapper[4852]: I1201 20:08:36.411318 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q5bf\" (UniqueName: \"kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:36 crc kubenswrapper[4852]: E1201 20:08:36.412274 4852 projected.go:194] Error preparing data for projected volume kube-api-access-8q5bf for pod openshift-marketplace/marketplace-operator-79b997595-vbxrx: failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:08:36 crc kubenswrapper[4852]: E1201 20:08:36.412367 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf podName:674a3020-de73-41ff-b140-3ab2bc9d11aa nodeName:}" failed. No retries permitted until 2025-12-01 20:08:40.412344342 +0000 UTC m=+240.339425759 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-8q5bf" (UniqueName: "kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf") pod "marketplace-operator-79b997595-vbxrx" (UID: "674a3020-de73-41ff-b140-3ab2bc9d11aa") : failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:08:36 crc kubenswrapper[4852]: E1201 20:08:36.476068 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="3.2s" Dec 01 20:08:37 crc kubenswrapper[4852]: E1201 20:08:37.817961 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:08:37Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:08:37Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:08:37Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T20:08:37Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:37 crc kubenswrapper[4852]: E1201 20:08:37.818859 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:37 crc kubenswrapper[4852]: E1201 20:08:37.819535 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:37 crc kubenswrapper[4852]: E1201 20:08:37.819853 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:37 crc kubenswrapper[4852]: E1201 20:08:37.820141 4852 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:37 crc kubenswrapper[4852]: E1201 20:08:37.820188 4852 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 20:08:39 crc kubenswrapper[4852]: E1201 20:08:39.678153 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="6.4s" Dec 01 20:08:40 crc kubenswrapper[4852]: I1201 20:08:40.324365 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:40 crc kubenswrapper[4852]: I1201 20:08:40.328041 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:40 crc kubenswrapper[4852]: I1201 20:08:40.328762 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:40 crc kubenswrapper[4852]: I1201 20:08:40.329174 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:40 crc kubenswrapper[4852]: I1201 20:08:40.329649 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:40 crc 
kubenswrapper[4852]: I1201 20:08:40.330650 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:40 crc kubenswrapper[4852]: I1201 20:08:40.480258 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q5bf\" (UniqueName: \"kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:40 crc kubenswrapper[4852]: E1201 20:08:40.481724 4852 projected.go:194] Error preparing data for projected volume kube-api-access-8q5bf for pod openshift-marketplace/marketplace-operator-79b997595-vbxrx: failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:08:40 crc kubenswrapper[4852]: E1201 20:08:40.481842 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf podName:674a3020-de73-41ff-b140-3ab2bc9d11aa nodeName:}" failed. No retries permitted until 2025-12-01 20:08:48.481812188 +0000 UTC m=+248.408893615 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-8q5bf" (UniqueName: "kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf") pod "marketplace-operator-79b997595-vbxrx" (UID: "674a3020-de73-41ff-b140-3ab2bc9d11aa") : failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token": dial tcp 38.102.83.219:6443: connect: connection refused Dec 01 20:08:45 crc kubenswrapper[4852]: I1201 20:08:45.319174 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:45 crc kubenswrapper[4852]: I1201 20:08:45.320762 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:45 crc kubenswrapper[4852]: I1201 20:08:45.321345 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:45 crc kubenswrapper[4852]: I1201 20:08:45.321942 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:45 crc kubenswrapper[4852]: I1201 20:08:45.322620 4852 status_manager.go:851] "Failed to get status for pod" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:45 crc kubenswrapper[4852]: I1201 20:08:45.323154 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:45 crc kubenswrapper[4852]: I1201 20:08:45.323604 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:45 crc kubenswrapper[4852]: I1201 20:08:45.342550 4852 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b0efe287-9b50-4be4-ae49-8f417a66fbff" Dec 01 20:08:45 crc kubenswrapper[4852]: I1201 20:08:45.342606 4852 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b0efe287-9b50-4be4-ae49-8f417a66fbff" Dec 01 20:08:45 crc kubenswrapper[4852]: E1201 20:08:45.343355 4852 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:45 crc kubenswrapper[4852]: I1201 20:08:45.344333 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:45 crc kubenswrapper[4852]: E1201 20:08:45.829702 4852 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.219:6443: connect: connection refused" event="&Event{ObjectMeta:{marketplace-operator-79b997595-vbxrx.187d30478bb1a928 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-vbxrx,UID:674a3020-de73-41ff-b140-3ab2bc9d11aa,APIVersion:v1,ResourceVersion:29356,FieldPath:,},Reason:FailedMount,Message:MountVolume.SetUp failed for volume \"kube-api-access-8q5bf\" : failed to fetch token: Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/serviceaccounts/marketplace-operator/token\": dial tcp 38.102.83.219:6443: connect: connection refused,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 20:08:32.843589928 +0000 UTC m=+232.770671345,LastTimestamp:2025-12-01 20:08:32.843589928 +0000 UTC m=+232.770671345,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.018882 4852 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="6e922ad6b175b59662caff32538bdbfa4c33a10f64a6f1379476e2b7bc62b7d7" exitCode=0 Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.018990 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"6e922ad6b175b59662caff32538bdbfa4c33a10f64a6f1379476e2b7bc62b7d7"} Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.019063 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a861053996a4ad8c819c144e118239a0df48e8b43aa8ca06bef218e0620b89dc"} Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.019762 4852 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b0efe287-9b50-4be4-ae49-8f417a66fbff" Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.019822 4852 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b0efe287-9b50-4be4-ae49-8f417a66fbff" Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.020849 4852 status_manager.go:851] "Failed to get status for pod" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" pod="openshift-marketplace/redhat-marketplace-f5255" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f5255\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:46 crc kubenswrapper[4852]: E1201 20:08:46.021100 4852 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.021516 4852 status_manager.go:851] "Failed to get status for pod" 
podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" pod="openshift-marketplace/certified-operators-sxthp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sxthp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.022088 4852 status_manager.go:851] "Failed to get status for pod" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.022533 4852 status_manager.go:851] "Failed to get status for pod" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" pod="openshift-marketplace/redhat-operators-dwvx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-dwvx7\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.022966 4852 status_manager.go:851] "Failed to get status for pod" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" pod="openshift-marketplace/marketplace-operator-79b997595-zfljp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-zfljp\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:46 crc kubenswrapper[4852]: I1201 20:08:46.023406 4852 status_manager.go:851] "Failed to get status for pod" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" pod="openshift-marketplace/community-operators-4vvkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4vvkx\": dial tcp 38.102.83.219:6443: connect: connection refused" Dec 01 20:08:46 crc kubenswrapper[4852]: E1201 20:08:46.081661 4852 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.219:6443: connect: connection refused" interval="7s" Dec 01 20:08:47 crc kubenswrapper[4852]: I1201 20:08:47.037132 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 01 20:08:47 crc kubenswrapper[4852]: I1201 20:08:47.037565 4852 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5" exitCode=1 Dec 01 20:08:47 crc kubenswrapper[4852]: I1201 20:08:47.037670 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5"} Dec 01 20:08:47 crc kubenswrapper[4852]: I1201 20:08:47.039128 4852 scope.go:117] "RemoveContainer" containerID="74ebd10f47fc96264be154908584bf1da3e9b43ada3921f7a35f0d3a0adf97d5" Dec 01 20:08:47 crc kubenswrapper[4852]: I1201 20:08:47.043509 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"06b0bc81a31fe5b587d1ca6573fcf4ff34a12f2f666b8201f1b88ae7c84d980f"} Dec 01 20:08:47 crc 
kubenswrapper[4852]: I1201 20:08:47.043548 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b7aea3fee5520e3fb12fffc843e17f31392ccf036c9f4c04a489118e661d885e"} Dec 01 20:08:47 crc kubenswrapper[4852]: I1201 20:08:47.043570 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e3e6784041235af4b603522db187a7827e0ae161ffd02bab58fb28cd3991914e"} Dec 01 20:08:47 crc kubenswrapper[4852]: I1201 20:08:47.043583 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"10306ac7fca39ef16a7ac3c3203eba99f7775a6ddfc1d3a41e1d0d6600d1e1f6"} Dec 01 20:08:47 crc kubenswrapper[4852]: I1201 20:08:47.311428 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:08:47 crc kubenswrapper[4852]: I1201 20:08:47.361242 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:08:48 crc kubenswrapper[4852]: I1201 20:08:48.057129 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 01 20:08:48 crc kubenswrapper[4852]: I1201 20:08:48.057222 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c20fe31412067a2ebffd4885a8c4329928f862f977e58ef4b0f61ee7c507ae8a"} Dec 01 20:08:48 crc kubenswrapper[4852]: I1201 20:08:48.064356 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"fd9bbf43e4f4c5b87c437ef69530d0f71aff56763cf74a75d420ad0e48e56e16"} Dec 01 20:08:48 crc kubenswrapper[4852]: I1201 20:08:48.064876 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:48 crc kubenswrapper[4852]: I1201 20:08:48.064975 4852 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b0efe287-9b50-4be4-ae49-8f417a66fbff" Dec 01 20:08:48 crc kubenswrapper[4852]: I1201 20:08:48.065006 4852 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b0efe287-9b50-4be4-ae49-8f417a66fbff" Dec 01 20:08:48 crc kubenswrapper[4852]: I1201 20:08:48.505088 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q5bf\" (UniqueName: \"kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:50 crc kubenswrapper[4852]: I1201 20:08:50.345685 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:50 crc kubenswrapper[4852]: I1201 20:08:50.345753 4852 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:50 crc kubenswrapper[4852]: I1201 20:08:50.352537 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:53 crc kubenswrapper[4852]: I1201 20:08:53.074650 4852 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:53 crc kubenswrapper[4852]: I1201 20:08:53.082413 4852 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0efe287-9b50-4be4-ae49-8f417a66fbff\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:08:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:08:46Z\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:08:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T20:08:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10306ac7fca39ef16a7ac3c3203eba99f7775a6ddfc1d3a41e1d0d6600d1e1f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:08:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7aea3fee5520e3fb12fffc843e17f31392ccf036c9f4c04a489118e661d885e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:08:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3e6784041235af4b603522db187a7827e0ae161ffd02bab58fb28cd3991914e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-clu
ster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:08:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd9bbf43e4f4c5b87c437ef69530d0f71aff56763cf74a75d420ad0e48e56e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:08:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://06b0bc81a31fe5b587d1ca6573fcf4ff34a12f2f666b8201f1b88ae7c84d980f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T20:08:46Z\\\"}}}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e922ad6b175b59662caff32538bdbfa4c33a10f64a6f1379476e2b7bc62b7d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e922ad6b175b59662caff32538bdbfa4c33a10f64a6f1379476e2b7bc62b7d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T20:08:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T20:08:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}]}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": pods \"kube-apiserver-crc\" not found" Dec 01 20:08:53 crc kubenswrapper[4852]: I1201 20:08:53.109747 4852 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b0efe287-9b50-4be4-ae49-8f417a66fbff" Dec 01 20:08:53 crc kubenswrapper[4852]: I1201 20:08:53.109789 4852 mirror_client.go:130] "Deleting a mirror pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b0efe287-9b50-4be4-ae49-8f417a66fbff" Dec 01 20:08:53 crc kubenswrapper[4852]: I1201 20:08:53.114686 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:08:53 crc kubenswrapper[4852]: I1201 20:08:53.117696 4852 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="0e55df3b-4898-408a-940e-ae9e8e7bb619" Dec 01 20:08:53 crc kubenswrapper[4852]: I1201 20:08:53.544965 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q5bf\" (UniqueName: \"kubernetes.io/projected/674a3020-de73-41ff-b140-3ab2bc9d11aa-kube-api-access-8q5bf\") pod \"marketplace-operator-79b997595-vbxrx\" (UID: \"674a3020-de73-41ff-b140-3ab2bc9d11aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:53 crc kubenswrapper[4852]: I1201 20:08:53.592141 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:08:54 crc kubenswrapper[4852]: I1201 20:08:54.117165 4852 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b0efe287-9b50-4be4-ae49-8f417a66fbff" Dec 01 20:08:54 crc kubenswrapper[4852]: I1201 20:08:54.117649 4852 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b0efe287-9b50-4be4-ae49-8f417a66fbff" Dec 01 20:08:54 crc kubenswrapper[4852]: I1201 20:08:54.117265 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" event={"ID":"674a3020-de73-41ff-b140-3ab2bc9d11aa","Type":"ContainerStarted","Data":"b4cbff69c744f1fda538d8daf7ea3a6d70647fc78acfa3d38680d323fea3c3be"} Dec 01 20:08:54 crc kubenswrapper[4852]: I1201 20:08:54.122373 4852 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="0e55df3b-4898-408a-940e-ae9e8e7bb619" Dec 01 20:08:55 crc kubenswrapper[4852]: I1201 20:08:55.127514 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/0.log" Dec 01 20:08:55 crc kubenswrapper[4852]: I1201 20:08:55.128368 4852 generic.go:334] "Generic (PLEG): container finished" podID="674a3020-de73-41ff-b140-3ab2bc9d11aa" containerID="116ee40988f2b07948cc45ac421e7c0be51bf82233e9e3c1432cdf23d14d6802" exitCode=1 Dec 01 20:08:55 crc kubenswrapper[4852]: I1201 20:08:55.128421 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" event={"ID":"674a3020-de73-41ff-b140-3ab2bc9d11aa","Type":"ContainerDied","Data":"116ee40988f2b07948cc45ac421e7c0be51bf82233e9e3c1432cdf23d14d6802"} Dec 01 20:08:55 crc kubenswrapper[4852]: I1201 20:08:55.129341 4852 scope.go:117] "RemoveContainer" containerID="116ee40988f2b07948cc45ac421e7c0be51bf82233e9e3c1432cdf23d14d6802" Dec 01 20:08:56 crc kubenswrapper[4852]: I1201 20:08:56.136648 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/1.log" Dec 01 20:08:56 crc kubenswrapper[4852]: 
I1201 20:08:56.138000 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/0.log" Dec 01 20:08:56 crc kubenswrapper[4852]: I1201 20:08:56.138118 4852 generic.go:334] "Generic (PLEG): container finished" podID="674a3020-de73-41ff-b140-3ab2bc9d11aa" containerID="393e20aa72c7151a9d38d33f502cda25d4f10e47cb732335b6baca094642133a" exitCode=1 Dec 01 20:08:56 crc kubenswrapper[4852]: I1201 20:08:56.138154 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" event={"ID":"674a3020-de73-41ff-b140-3ab2bc9d11aa","Type":"ContainerDied","Data":"393e20aa72c7151a9d38d33f502cda25d4f10e47cb732335b6baca094642133a"} Dec 01 20:08:56 crc kubenswrapper[4852]: I1201 20:08:56.138232 4852 scope.go:117] "RemoveContainer" containerID="116ee40988f2b07948cc45ac421e7c0be51bf82233e9e3c1432cdf23d14d6802" Dec 01 20:08:56 crc kubenswrapper[4852]: I1201 20:08:56.139258 4852 scope.go:117] "RemoveContainer" containerID="393e20aa72c7151a9d38d33f502cda25d4f10e47cb732335b6baca094642133a" Dec 01 20:08:56 crc kubenswrapper[4852]: E1201 20:08:56.139748 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-vbxrx_openshift-marketplace(674a3020-de73-41ff-b140-3ab2bc9d11aa)\"" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" podUID="674a3020-de73-41ff-b140-3ab2bc9d11aa" Dec 01 20:08:57 crc kubenswrapper[4852]: I1201 20:08:57.149951 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/1.log" Dec 01 20:08:57 crc kubenswrapper[4852]: I1201 20:08:57.151156 4852 scope.go:117] "RemoveContainer" containerID="393e20aa72c7151a9d38d33f502cda25d4f10e47cb732335b6baca094642133a" Dec 01 20:08:57 crc kubenswrapper[4852]: E1201 20:08:57.151726 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-vbxrx_openshift-marketplace(674a3020-de73-41ff-b140-3ab2bc9d11aa)\"" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" podUID="674a3020-de73-41ff-b140-3ab2bc9d11aa" Dec 01 20:08:57 crc kubenswrapper[4852]: I1201 20:08:57.312129 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:08:57 crc kubenswrapper[4852]: I1201 20:08:57.324495 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:08:57 crc kubenswrapper[4852]: I1201 20:08:57.362875 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:08:57 crc kubenswrapper[4852]: I1201 20:08:57.370003 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 20:09:03 crc kubenswrapper[4852]: I1201 20:09:03.247166 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 
01 20:09:03 crc kubenswrapper[4852]: I1201 20:09:03.482259 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 01 20:09:03 crc kubenswrapper[4852]: I1201 20:09:03.592548 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:09:03 crc kubenswrapper[4852]: I1201 20:09:03.592674 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:09:03 crc kubenswrapper[4852]: I1201 20:09:03.593499 4852 scope.go:117] "RemoveContainer" containerID="393e20aa72c7151a9d38d33f502cda25d4f10e47cb732335b6baca094642133a" Dec 01 20:09:03 crc kubenswrapper[4852]: E1201 20:09:03.593875 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-vbxrx_openshift-marketplace(674a3020-de73-41ff-b140-3ab2bc9d11aa)\"" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" podUID="674a3020-de73-41ff-b140-3ab2bc9d11aa" Dec 01 20:09:04 crc kubenswrapper[4852]: I1201 20:09:04.038610 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 01 20:09:04 crc kubenswrapper[4852]: I1201 20:09:04.068828 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 01 20:09:04 crc kubenswrapper[4852]: I1201 20:09:04.197966 4852 scope.go:117] "RemoveContainer" containerID="393e20aa72c7151a9d38d33f502cda25d4f10e47cb732335b6baca094642133a" Dec 01 20:09:04 crc kubenswrapper[4852]: E1201 20:09:04.198240 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-vbxrx_openshift-marketplace(674a3020-de73-41ff-b140-3ab2bc9d11aa)\"" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" podUID="674a3020-de73-41ff-b140-3ab2bc9d11aa" Dec 01 20:09:04 crc kubenswrapper[4852]: I1201 20:09:04.578871 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 01 20:09:04 crc kubenswrapper[4852]: I1201 20:09:04.701828 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 01 20:09:04 crc kubenswrapper[4852]: I1201 20:09:04.716807 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 01 20:09:04 crc kubenswrapper[4852]: I1201 20:09:04.914021 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 01 20:09:04 crc kubenswrapper[4852]: I1201 20:09:04.963701 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.002244 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.048134 4852 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.240801 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.320832 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.422012 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.486551 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.523367 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.533537 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.562502 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.714702 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.725248 4852 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.805302 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.928957 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.930977 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 01 20:09:05 crc kubenswrapper[4852]: I1201 20:09:05.938014 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 01 20:09:06 crc kubenswrapper[4852]: I1201 20:09:06.205274 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 01 20:09:06 crc kubenswrapper[4852]: I1201 20:09:06.232959 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 01 20:09:06 crc kubenswrapper[4852]: I1201 20:09:06.303748 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 01 20:09:06 crc kubenswrapper[4852]: I1201 20:09:06.316705 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 01 20:09:06 crc kubenswrapper[4852]: I1201 20:09:06.348431 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 01 20:09:06 crc kubenswrapper[4852]: I1201 20:09:06.483815 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 01 20:09:06 crc 
kubenswrapper[4852]: I1201 20:09:06.548120 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 01 20:09:06 crc kubenswrapper[4852]: I1201 20:09:06.668078 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 01 20:09:06 crc kubenswrapper[4852]: I1201 20:09:06.721581 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.058785 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.159962 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.227701 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.301538 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.303069 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.308868 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.402623 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.496232 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.634168 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.639477 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.694264 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.702242 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.875819 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.895612 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.970381 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 01 20:09:07 crc kubenswrapper[4852]: I1201 20:09:07.981064 4852 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.004550 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.009154 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.271878 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.318073 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.383146 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.416344 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.456308 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.489696 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.509936 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.607852 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.780913 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.783986 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.810331 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 01 20:09:08 crc kubenswrapper[4852]: I1201 20:09:08.990658 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.093208 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.116994 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.136889 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.140995 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.335799 4852 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.338538 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.396695 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.408932 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.416686 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.417571 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.447680 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.493306 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.653394 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.703212 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.790873 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.827729 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.858543 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 01 20:09:09 crc kubenswrapper[4852]: I1201 20:09:09.874844 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.124535 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.193870 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.236664 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.251071 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.286065 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.311463 4852 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.522424 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.557201 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.574553 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.583380 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.681834 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.765911 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.776004 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.796916 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.905282 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.929015 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.932847 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.966575 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 20:09:10 crc kubenswrapper[4852]: I1201 20:09:10.995921 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.000252 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.072006 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.135859 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.190160 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.294947 4852 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.317998 4852 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.387145 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.491302 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.513910 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.539295 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.599834 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.614733 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.681791 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.698545 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.736542 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.938998 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 01 20:09:11 crc kubenswrapper[4852]: I1201 20:09:11.984996 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.108185 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.152540 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.160151 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.170378 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.354756 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.386320 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.421902 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.429498 4852 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.465001 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.508262 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.681438 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.761352 4852 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.767675 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-f5255","openshift-marketplace/community-operators-4vvkx","openshift-marketplace/redhat-operators-dwvx7","openshift-marketplace/marketplace-operator-79b997595-zfljp","openshift-marketplace/certified-operators-sxthp","openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.767777 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.767806 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vbxrx"] Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.768528 4852 scope.go:117] "RemoveContainer" containerID="393e20aa72c7151a9d38d33f502cda25d4f10e47cb732335b6baca094642133a" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.794479 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=19.794434568 podStartE2EDuration="19.794434568s" podCreationTimestamp="2025-12-01 20:08:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:09:12.790343835 +0000 UTC m=+272.717425302" watchObservedRunningTime="2025-12-01 20:09:12.794434568 +0000 UTC m=+272.721515995" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.809265 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.885277 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.902880 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.945048 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 01 20:09:12 crc kubenswrapper[4852]: I1201 20:09:12.988085 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.035434 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 01 20:09:13 crc kubenswrapper[4852]: 
I1201 20:09:13.037797 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.053175 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.083358 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.106775 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.140283 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.261679 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/2.log" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.263008 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/1.log" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.263081 4852 generic.go:334] "Generic (PLEG): container finished" podID="674a3020-de73-41ff-b140-3ab2bc9d11aa" containerID="55dc3a1d5d24292cc32fa95559d297c73b9f074240f04a3cc60ef756b4b74834" exitCode=1 Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.263135 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" event={"ID":"674a3020-de73-41ff-b140-3ab2bc9d11aa","Type":"ContainerDied","Data":"55dc3a1d5d24292cc32fa95559d297c73b9f074240f04a3cc60ef756b4b74834"} Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.263341 4852 scope.go:117] "RemoveContainer" containerID="393e20aa72c7151a9d38d33f502cda25d4f10e47cb732335b6baca094642133a" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.264330 4852 scope.go:117] "RemoveContainer" containerID="55dc3a1d5d24292cc32fa95559d297c73b9f074240f04a3cc60ef756b4b74834" Dec 01 20:09:13 crc kubenswrapper[4852]: E1201 20:09:13.264887 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-vbxrx_openshift-marketplace(674a3020-de73-41ff-b140-3ab2bc9d11aa)\"" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" podUID="674a3020-de73-41ff-b140-3ab2bc9d11aa" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.298403 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.310379 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.340369 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.557954 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 
01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.582906 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.592651 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.592763 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.624332 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.632766 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.690117 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.708873 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.745571 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.797522 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.810612 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.834813 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.869028 4852 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.889324 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.905691 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.923957 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.937520 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 01 20:09:13 crc kubenswrapper[4852]: I1201 20:09:13.953942 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.049223 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.117127 4852 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"machine-api-operator-tls" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.195195 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.209218 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.277018 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/2.log" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.277976 4852 scope.go:117] "RemoveContainer" containerID="55dc3a1d5d24292cc32fa95559d297c73b9f074240f04a3cc60ef756b4b74834" Dec 01 20:09:14 crc kubenswrapper[4852]: E1201 20:09:14.278309 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-vbxrx_openshift-marketplace(674a3020-de73-41ff-b140-3ab2bc9d11aa)\"" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" podUID="674a3020-de73-41ff-b140-3ab2bc9d11aa" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.287845 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.332354 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" path="/var/lib/kubelet/pods/10a3a8e7-980e-4015-9418-f0854d431b85/volumes" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.333315 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" path="/var/lib/kubelet/pods/65c26ddd-9a26-4b9c-b3fa-74827d33872a/volumes" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.334001 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" path="/var/lib/kubelet/pods/bb095312-fe29-458a-b5d5-8fd607a89e8b/volumes" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.335786 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" path="/var/lib/kubelet/pods/ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e/volumes" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.336811 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" path="/var/lib/kubelet/pods/f9e7ffee-9028-45cb-83bd-3d5a0019ab16/volumes" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.340498 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.364887 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.384119 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.436035 4852 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.441202 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.476872 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.496541 4852 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.542236 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.631132 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.655788 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.697038 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.731790 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.774267 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.819908 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 01 20:09:14 crc kubenswrapper[4852]: I1201 20:09:14.843032 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.000098 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.077420 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.158870 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.161906 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.244917 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.320652 4852 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.352579 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.410693 4852 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-console"/"default-dockercfg-chnjx" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.461653 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.488542 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.504555 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.505949 4852 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.506349 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351" gracePeriod=5 Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.606557 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.612999 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.682801 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.688411 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.847309 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.859504 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.907560 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.909285 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 01 20:09:15 crc kubenswrapper[4852]: I1201 20:09:15.977022 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.031683 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.036213 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.051124 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.115139 4852 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.209205 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.295857 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.350474 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.382430 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.438827 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.503976 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.512132 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.512544 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.551675 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.552614 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.660436 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.716431 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.717929 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.739777 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.801719 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.869122 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.932327 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 01 20:09:16 crc kubenswrapper[4852]: I1201 20:09:16.997429 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 
20:09:17.003861 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.047060 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.067722 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.170649 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.244564 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.266411 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.497188 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.810977 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.855144 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.858171 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.874170 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 01 20:09:17 crc kubenswrapper[4852]: I1201 20:09:17.915118 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 20:09:18 crc kubenswrapper[4852]: I1201 20:09:18.036493 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 01 20:09:18 crc kubenswrapper[4852]: I1201 20:09:18.167570 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 20:09:18 crc kubenswrapper[4852]: I1201 20:09:18.226635 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 01 20:09:18 crc kubenswrapper[4852]: I1201 20:09:18.451754 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 01 20:09:18 crc kubenswrapper[4852]: I1201 20:09:18.502878 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 01 20:09:19 crc kubenswrapper[4852]: I1201 20:09:19.121069 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 01 20:09:19 crc kubenswrapper[4852]: I1201 20:09:19.168684 4852 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 01 20:09:19 crc kubenswrapper[4852]: I1201 20:09:19.170415 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 01 20:09:19 crc kubenswrapper[4852]: I1201 20:09:19.504967 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 20:09:19 crc kubenswrapper[4852]: I1201 20:09:19.571117 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.636074 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.636494 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.724267 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.768215 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.816763 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.816840 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.816921 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.816961 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.816984 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.817059 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.817133 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.817133 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.817199 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.817318 4852 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.817340 4852 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.817352 4852 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.817367 4852 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.829909 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.917415 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 01 20:09:20 crc kubenswrapper[4852]: I1201 20:09:20.918340 4852 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:21 crc kubenswrapper[4852]: I1201 20:09:21.051842 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 01 20:09:21 crc kubenswrapper[4852]: I1201 20:09:21.081058 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 20:09:21 crc kubenswrapper[4852]: I1201 20:09:21.126346 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 01 20:09:21 crc kubenswrapper[4852]: I1201 20:09:21.327111 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 01 20:09:21 crc kubenswrapper[4852]: I1201 20:09:21.327229 4852 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351" exitCode=137 Dec 01 20:09:21 crc kubenswrapper[4852]: I1201 20:09:21.327318 4852 scope.go:117] "RemoveContainer" containerID="9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351" Dec 01 20:09:21 crc kubenswrapper[4852]: I1201 20:09:21.327368 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 20:09:21 crc kubenswrapper[4852]: I1201 20:09:21.349001 4852 scope.go:117] "RemoveContainer" containerID="9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351" Dec 01 20:09:21 crc kubenswrapper[4852]: E1201 20:09:21.349703 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351\": container with ID starting with 9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351 not found: ID does not exist" containerID="9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351" Dec 01 20:09:21 crc kubenswrapper[4852]: I1201 20:09:21.349790 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351"} err="failed to get container status \"9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351\": rpc error: code = NotFound desc = could not find container \"9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351\": container with ID starting with 9548d99ffe9259ecc4826c7a28f3f32c62f7790216332f87ea57ee5d7a71d351 not found: ID does not exist" Dec 01 20:09:22 crc kubenswrapper[4852]: I1201 20:09:22.328058 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 01 20:09:26 crc kubenswrapper[4852]: I1201 20:09:26.321136 4852 scope.go:117] "RemoveContainer" containerID="55dc3a1d5d24292cc32fa95559d297c73b9f074240f04a3cc60ef756b4b74834" Dec 01 20:09:26 crc kubenswrapper[4852]: E1201 20:09:26.322839 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-vbxrx_openshift-marketplace(674a3020-de73-41ff-b140-3ab2bc9d11aa)\"" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" podUID="674a3020-de73-41ff-b140-3ab2bc9d11aa" Dec 01 20:09:38 crc kubenswrapper[4852]: I1201 20:09:38.320075 4852 scope.go:117] "RemoveContainer" containerID="55dc3a1d5d24292cc32fa95559d297c73b9f074240f04a3cc60ef756b4b74834" Dec 01 20:09:39 crc kubenswrapper[4852]: I1201 20:09:39.436843 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/2.log" Dec 01 20:09:39 crc kubenswrapper[4852]: I1201 20:09:39.437333 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" event={"ID":"674a3020-de73-41ff-b140-3ab2bc9d11aa","Type":"ContainerStarted","Data":"6951ff3f7f5860810c6f86877759faad55878d99aa90fb0d5eeff8e1e58e25e6"} Dec 01 20:09:39 crc kubenswrapper[4852]: I1201 20:09:39.437899 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:09:39 crc kubenswrapper[4852]: I1201 20:09:39.442193 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" Dec 01 20:09:39 crc kubenswrapper[4852]: I1201 20:09:39.465937 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/marketplace-operator-79b997595-vbxrx" podStartSLOduration=67.465904858 podStartE2EDuration="1m7.465904858s" podCreationTimestamp="2025-12-01 20:08:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:09:39.460344769 +0000 UTC m=+299.387426196" watchObservedRunningTime="2025-12-01 20:09:39.465904858 +0000 UTC m=+299.392986315" Dec 01 20:09:45 crc kubenswrapper[4852]: I1201 20:09:45.200708 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rxccs"] Dec 01 20:09:45 crc kubenswrapper[4852]: I1201 20:09:45.202096 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" podUID="b2ae2ef7-e308-4896-b697-31b8241dffca" containerName="controller-manager" containerID="cri-o://6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426" gracePeriod=30 Dec 01 20:09:45 crc kubenswrapper[4852]: I1201 20:09:45.294670 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd"] Dec 01 20:09:45 crc kubenswrapper[4852]: I1201 20:09:45.295040 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" podUID="ac2e2b97-df99-4373-b8f4-990f66fdc01b" containerName="route-controller-manager" containerID="cri-o://ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669" gracePeriod=30 Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.101945 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.187049 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.305825 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-proxy-ca-bundles\") pod \"b2ae2ef7-e308-4896-b697-31b8241dffca\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.305883 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-config\") pod \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.305942 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-config\") pod \"b2ae2ef7-e308-4896-b697-31b8241dffca\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.305969 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac2e2b97-df99-4373-b8f4-990f66fdc01b-serving-cert\") pod \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.305994 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-client-ca\") pod \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.306023 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jj9n\" (UniqueName: \"kubernetes.io/projected/ac2e2b97-df99-4373-b8f4-990f66fdc01b-kube-api-access-9jj9n\") pod \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\" (UID: \"ac2e2b97-df99-4373-b8f4-990f66fdc01b\") " Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.306663 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2ae2ef7-e308-4896-b697-31b8241dffca-serving-cert\") pod \"b2ae2ef7-e308-4896-b697-31b8241dffca\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.306732 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-client-ca\") pod \"b2ae2ef7-e308-4896-b697-31b8241dffca\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.306760 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2gtg\" (UniqueName: \"kubernetes.io/projected/b2ae2ef7-e308-4896-b697-31b8241dffca-kube-api-access-m2gtg\") pod \"b2ae2ef7-e308-4896-b697-31b8241dffca\" (UID: \"b2ae2ef7-e308-4896-b697-31b8241dffca\") " Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.307318 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-client-ca" (OuterVolumeSpecName: "client-ca") pod "b2ae2ef7-e308-4896-b697-31b8241dffca" 
(UID: "b2ae2ef7-e308-4896-b697-31b8241dffca"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.307348 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-client-ca" (OuterVolumeSpecName: "client-ca") pod "ac2e2b97-df99-4373-b8f4-990f66fdc01b" (UID: "ac2e2b97-df99-4373-b8f4-990f66fdc01b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.307887 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-config" (OuterVolumeSpecName: "config") pod "ac2e2b97-df99-4373-b8f4-990f66fdc01b" (UID: "ac2e2b97-df99-4373-b8f4-990f66fdc01b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.307975 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b2ae2ef7-e308-4896-b697-31b8241dffca" (UID: "b2ae2ef7-e308-4896-b697-31b8241dffca"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.308110 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-config" (OuterVolumeSpecName: "config") pod "b2ae2ef7-e308-4896-b697-31b8241dffca" (UID: "b2ae2ef7-e308-4896-b697-31b8241dffca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.312264 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ae2ef7-e308-4896-b697-31b8241dffca-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b2ae2ef7-e308-4896-b697-31b8241dffca" (UID: "b2ae2ef7-e308-4896-b697-31b8241dffca"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.312282 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac2e2b97-df99-4373-b8f4-990f66fdc01b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ac2e2b97-df99-4373-b8f4-990f66fdc01b" (UID: "ac2e2b97-df99-4373-b8f4-990f66fdc01b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.312294 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac2e2b97-df99-4373-b8f4-990f66fdc01b-kube-api-access-9jj9n" (OuterVolumeSpecName: "kube-api-access-9jj9n") pod "ac2e2b97-df99-4373-b8f4-990f66fdc01b" (UID: "ac2e2b97-df99-4373-b8f4-990f66fdc01b"). InnerVolumeSpecName "kube-api-access-9jj9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.312284 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2ae2ef7-e308-4896-b697-31b8241dffca-kube-api-access-m2gtg" (OuterVolumeSpecName: "kube-api-access-m2gtg") pod "b2ae2ef7-e308-4896-b697-31b8241dffca" (UID: "b2ae2ef7-e308-4896-b697-31b8241dffca"). 
InnerVolumeSpecName "kube-api-access-m2gtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.408632 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac2e2b97-df99-4373-b8f4-990f66fdc01b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.408687 4852 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.408831 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jj9n\" (UniqueName: \"kubernetes.io/projected/ac2e2b97-df99-4373-b8f4-990f66fdc01b-kube-api-access-9jj9n\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.408852 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2ae2ef7-e308-4896-b697-31b8241dffca-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.408868 4852 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.408880 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2gtg\" (UniqueName: \"kubernetes.io/projected/b2ae2ef7-e308-4896-b697-31b8241dffca-kube-api-access-m2gtg\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.408891 4852 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.408904 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac2e2b97-df99-4373-b8f4-990f66fdc01b-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.408915 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2ae2ef7-e308-4896-b697-31b8241dffca-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.486787 4852 generic.go:334] "Generic (PLEG): container finished" podID="b2ae2ef7-e308-4896-b697-31b8241dffca" containerID="6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426" exitCode=0 Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.486898 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" event={"ID":"b2ae2ef7-e308-4896-b697-31b8241dffca","Type":"ContainerDied","Data":"6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426"} Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.486932 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.486967 4852 scope.go:117] "RemoveContainer" containerID="6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.486949 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rxccs" event={"ID":"b2ae2ef7-e308-4896-b697-31b8241dffca","Type":"ContainerDied","Data":"9e3fbf43ad64cae5b6055239262e998ce4fef6086a2060106fdbdff8924d4939"} Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.489058 4852 generic.go:334] "Generic (PLEG): container finished" podID="ac2e2b97-df99-4373-b8f4-990f66fdc01b" containerID="ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669" exitCode=0 Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.489144 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.489162 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" event={"ID":"ac2e2b97-df99-4373-b8f4-990f66fdc01b","Type":"ContainerDied","Data":"ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669"} Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.489204 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd" event={"ID":"ac2e2b97-df99-4373-b8f4-990f66fdc01b","Type":"ContainerDied","Data":"f13f39106cc7bee9f49c7b653fb9c1f9ea50cd1d5d9277fa6f4e7bff6908db4e"} Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.507251 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rxccs"] Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.513037 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rxccs"] Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.514984 4852 scope.go:117] "RemoveContainer" containerID="6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426" Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.516256 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426\": container with ID starting with 6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426 not found: ID does not exist" containerID="6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.516293 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426"} err="failed to get container status \"6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426\": rpc error: code = NotFound desc = could not find container \"6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426\": container with ID starting with 6eb0c312f5824725b2475480c4b8c0cdf7a6c9865fed4f4cbf7ddae707947426 not found: ID does not exist" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.516325 4852 scope.go:117] "RemoveContainer" 
containerID="ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.524825 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd"] Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.527872 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zvtsd"] Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.536310 4852 scope.go:117] "RemoveContainer" containerID="ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669" Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.536855 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669\": container with ID starting with ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669 not found: ID does not exist" containerID="ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.536915 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669"} err="failed to get container status \"ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669\": rpc error: code = NotFound desc = could not find container \"ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669\": container with ID starting with ff923fc6a18f57114cd1c8b31e273fbbe332543f17b0cd2fcc62562b81062669 not found: ID does not exist" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879139 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw"] Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879573 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerName="extract-utilities" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879596 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerName="extract-utilities" Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879611 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" containerName="extract-utilities" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879622 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" containerName="extract-utilities" Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879632 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" containerName="registry-server" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879643 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" containerName="registry-server" Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879654 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerName="extract-content" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879662 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerName="extract-content" Dec 01 20:09:46 crc 
kubenswrapper[4852]: E1201 20:09:46.879674 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerName="extract-content"
Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879682 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerName="extract-content"
Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879693 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerName="extract-content"
Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879702 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerName="extract-content"
Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879722 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2ae2ef7-e308-4896-b697-31b8241dffca" containerName="controller-manager"
Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879732 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ae2ef7-e308-4896-b697-31b8241dffca" containerName="controller-manager"
Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879743 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerName="registry-server"
Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879752 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerName="registry-server"
Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879764 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerName="extract-utilities"
Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879772 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerName="extract-utilities"
Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879782 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac2e2b97-df99-4373-b8f4-990f66fdc01b" containerName="route-controller-manager"
Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879791 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac2e2b97-df99-4373-b8f4-990f66fdc01b" containerName="route-controller-manager"
Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879808 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879815 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879828 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerName="extract-utilities"
Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879836 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerName="extract-utilities"
Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879848 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" containerName="marketplace-operator"
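
The cpu_manager.go:410, state_mem.go:107, and memory_manager.go:354 burst in this stretch is RemoveStaleState: before admitting the replacement pods, the CPU and memory managers drop bookkeeping for containers that no longer exist (the pods deleted earlier in this log). In outline it is a sweep over an in-memory assignments map; a toy sketch with illustrative types and values:

    package main

    import "fmt"

    type key struct{ podUID, container string }

    // removeStale deletes assignments whose container is no longer active,
    // mirroring "RemoveStaleState: removing container" followed by
    // "Deleted CPUSet assignment".
    func removeStale(assignments map[key]string, active map[key]bool) {
        for k := range assignments {
            if !active[k] {
                fmt.Printf("RemoveStaleState: removing podUID=%q containerName=%q\n", k.podUID, k.container)
                delete(assignments, k)
            }
        }
    }

    func main() {
        state := map[key]string{
            {"b2ae2ef7", "controller-manager"}:       "0-3", // stale: pod was deleted
            {"9a155b21", "route-controller-manager"}: "0-3",
        }
        removeStale(state, map[key]bool{{"9a155b21", "route-controller-manager"}: true})
        fmt.Println("remaining assignments:", len(state)) // 1
    }
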
containerName="marketplace-operator" Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879868 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" containerName="installer" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879876 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" containerName="installer" Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879888 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerName="registry-server" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879896 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerName="registry-server" Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879909 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerName="registry-server" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879917 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerName="registry-server" Dec 01 20:09:46 crc kubenswrapper[4852]: E1201 20:09:46.879926 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" containerName="extract-content" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.879934 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" containerName="extract-content" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.880048 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac2e2b97-df99-4373-b8f4-990f66fdc01b" containerName="route-controller-manager" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.880060 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9e7ffee-9028-45cb-83bd-3d5a0019ab16" containerName="registry-server" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.880075 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb095312-fe29-458a-b5d5-8fd607a89e8b" containerName="registry-server" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.880085 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="10a3a8e7-980e-4015-9418-f0854d431b85" containerName="registry-server" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.880096 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2ae2ef7-e308-4896-b697-31b8241dffca" containerName="controller-manager" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.880107 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="65c26ddd-9a26-4b9c-b3fa-74827d33872a" containerName="marketplace-operator" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.880117 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb1af6df-557b-4088-8ea4-6f1940ddd885" containerName="installer" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.880124 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea3aaa39-f0ff-4aaf-858f-8b9824e35d0e" containerName="registry-server" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.880137 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.880888 4852 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.884348 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.884519 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.884631 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.884740 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.884975 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.885005 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5f75b5947d-k4hl8"] Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.885061 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.886258 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.889193 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.889809 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.890484 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.890524 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.895646 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.895704 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.906584 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.910841 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f75b5947d-k4hl8"] Dec 01 20:09:46 crc kubenswrapper[4852]: I1201 20:09:46.917213 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw"] Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.018139 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/cd568b48-d746-4b52-9ea9-99f7e80399b7-serving-cert\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.018223 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-config\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.018252 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-config\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.018276 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt59q\" (UniqueName: \"kubernetes.io/projected/9a155b21-2448-4b37-a16d-6948f4fee025-kube-api-access-qt59q\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.018304 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-proxy-ca-bundles\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.018360 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-client-ca\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.018407 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a155b21-2448-4b37-a16d-6948f4fee025-serving-cert\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.018444 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpltj\" (UniqueName: \"kubernetes.io/projected/cd568b48-d746-4b52-9ea9-99f7e80399b7-kube-api-access-zpltj\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.018502 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-client-ca\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.120116 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-client-ca\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.120190 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a155b21-2448-4b37-a16d-6948f4fee025-serving-cert\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.120230 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpltj\" (UniqueName: \"kubernetes.io/projected/cd568b48-d746-4b52-9ea9-99f7e80399b7-kube-api-access-zpltj\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.120262 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-client-ca\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.120281 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd568b48-d746-4b52-9ea9-99f7e80399b7-serving-cert\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.120313 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-config\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.120330 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-config\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.120350 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt59q\" (UniqueName: \"kubernetes.io/projected/9a155b21-2448-4b37-a16d-6948f4fee025-kube-api-access-qt59q\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: 
\"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.120373 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-proxy-ca-bundles\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.121800 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-client-ca\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.122776 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-proxy-ca-bundles\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.122968 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-client-ca\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.125200 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-config\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.125629 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-config\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.126435 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a155b21-2448-4b37-a16d-6948f4fee025-serving-cert\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.130021 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd568b48-d746-4b52-9ea9-99f7e80399b7-serving-cert\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.138945 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qt59q\" (UniqueName: \"kubernetes.io/projected/9a155b21-2448-4b37-a16d-6948f4fee025-kube-api-access-qt59q\") pod \"route-controller-manager-7bfc45787b-5d5jw\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.143890 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpltj\" (UniqueName: \"kubernetes.io/projected/cd568b48-d746-4b52-9ea9-99f7e80399b7-kube-api-access-zpltj\") pod \"controller-manager-5f75b5947d-k4hl8\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.221825 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.240991 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.474678 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw"] Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.496753 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" event={"ID":"9a155b21-2448-4b37-a16d-6948f4fee025","Type":"ContainerStarted","Data":"ddda40fe8f79b56991cc433a998a528531c700df4e391995d75a5c2d3c6103ad"} Dec 01 20:09:47 crc kubenswrapper[4852]: I1201 20:09:47.507174 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f75b5947d-k4hl8"] Dec 01 20:09:47 crc kubenswrapper[4852]: W1201 20:09:47.511837 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd568b48_d746_4b52_9ea9_99f7e80399b7.slice/crio-fca48b1b7187ebf46a7bdd47f75b17f21a8f6cb5f6f24bbe276e4008dbd82179 WatchSource:0}: Error finding container fca48b1b7187ebf46a7bdd47f75b17f21a8f6cb5f6f24bbe276e4008dbd82179: Status 404 returned error can't find the container with id fca48b1b7187ebf46a7bdd47f75b17f21a8f6cb5f6f24bbe276e4008dbd82179 Dec 01 20:09:48 crc kubenswrapper[4852]: I1201 20:09:48.335869 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac2e2b97-df99-4373-b8f4-990f66fdc01b" path="/var/lib/kubelet/pods/ac2e2b97-df99-4373-b8f4-990f66fdc01b/volumes" Dec 01 20:09:48 crc kubenswrapper[4852]: I1201 20:09:48.337328 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2ae2ef7-e308-4896-b697-31b8241dffca" path="/var/lib/kubelet/pods/b2ae2ef7-e308-4896-b697-31b8241dffca/volumes" Dec 01 20:09:48 crc kubenswrapper[4852]: I1201 20:09:48.506240 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" event={"ID":"9a155b21-2448-4b37-a16d-6948f4fee025","Type":"ContainerStarted","Data":"b4ff65ce795b711c66ad71d5a9af9550f810ffcff61ced90d802d8c67c0d8309"} Dec 01 20:09:48 crc kubenswrapper[4852]: I1201 20:09:48.507798 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" 
event={"ID":"cd568b48-d746-4b52-9ea9-99f7e80399b7","Type":"ContainerStarted","Data":"a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f"} Dec 01 20:09:48 crc kubenswrapper[4852]: I1201 20:09:48.507834 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" event={"ID":"cd568b48-d746-4b52-9ea9-99f7e80399b7","Type":"ContainerStarted","Data":"fca48b1b7187ebf46a7bdd47f75b17f21a8f6cb5f6f24bbe276e4008dbd82179"} Dec 01 20:09:48 crc kubenswrapper[4852]: I1201 20:09:48.508013 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:48 crc kubenswrapper[4852]: I1201 20:09:48.512545 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:09:48 crc kubenswrapper[4852]: I1201 20:09:48.526287 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" podStartSLOduration=3.526254902 podStartE2EDuration="3.526254902s" podCreationTimestamp="2025-12-01 20:09:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:09:48.523904246 +0000 UTC m=+308.450985693" watchObservedRunningTime="2025-12-01 20:09:48.526254902 +0000 UTC m=+308.453336319" Dec 01 20:09:48 crc kubenswrapper[4852]: I1201 20:09:48.544026 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" podStartSLOduration=3.543997805 podStartE2EDuration="3.543997805s" podCreationTimestamp="2025-12-01 20:09:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:09:48.540592185 +0000 UTC m=+308.467673612" watchObservedRunningTime="2025-12-01 20:09:48.543997805 +0000 UTC m=+308.471079222" Dec 01 20:09:49 crc kubenswrapper[4852]: I1201 20:09:49.515566 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:09:49 crc kubenswrapper[4852]: I1201 20:09:49.524505 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.180054 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw"] Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.182619 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" podUID="9a155b21-2448-4b37-a16d-6948f4fee025" containerName="route-controller-manager" containerID="cri-o://b4ff65ce795b711c66ad71d5a9af9550f810ffcff61ced90d802d8c67c0d8309" gracePeriod=30 Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.620259 4852 generic.go:334] "Generic (PLEG): container finished" podID="9a155b21-2448-4b37-a16d-6948f4fee025" containerID="b4ff65ce795b711c66ad71d5a9af9550f810ffcff61ced90d802d8c67c0d8309" exitCode=0 Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.620395 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" event={"ID":"9a155b21-2448-4b37-a16d-6948f4fee025","Type":"ContainerDied","Data":"b4ff65ce795b711c66ad71d5a9af9550f810ffcff61ced90d802d8c67c0d8309"} Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.620482 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" event={"ID":"9a155b21-2448-4b37-a16d-6948f4fee025","Type":"ContainerDied","Data":"ddda40fe8f79b56991cc433a998a528531c700df4e391995d75a5c2d3c6103ad"} Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.620502 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ddda40fe8f79b56991cc433a998a528531c700df4e391995d75a5c2d3c6103ad" Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.626253 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.723696 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-client-ca\") pod \"9a155b21-2448-4b37-a16d-6948f4fee025\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.723894 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a155b21-2448-4b37-a16d-6948f4fee025-serving-cert\") pod \"9a155b21-2448-4b37-a16d-6948f4fee025\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.724004 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-config\") pod \"9a155b21-2448-4b37-a16d-6948f4fee025\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.724063 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt59q\" (UniqueName: \"kubernetes.io/projected/9a155b21-2448-4b37-a16d-6948f4fee025-kube-api-access-qt59q\") pod \"9a155b21-2448-4b37-a16d-6948f4fee025\" (UID: \"9a155b21-2448-4b37-a16d-6948f4fee025\") " Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.724853 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-client-ca" (OuterVolumeSpecName: "client-ca") pod "9a155b21-2448-4b37-a16d-6948f4fee025" (UID: "9a155b21-2448-4b37-a16d-6948f4fee025"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.725277 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-config" (OuterVolumeSpecName: "config") pod "9a155b21-2448-4b37-a16d-6948f4fee025" (UID: "9a155b21-2448-4b37-a16d-6948f4fee025"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.729283 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a155b21-2448-4b37-a16d-6948f4fee025-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9a155b21-2448-4b37-a16d-6948f4fee025" (UID: "9a155b21-2448-4b37-a16d-6948f4fee025"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.729329 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a155b21-2448-4b37-a16d-6948f4fee025-kube-api-access-qt59q" (OuterVolumeSpecName: "kube-api-access-qt59q") pod "9a155b21-2448-4b37-a16d-6948f4fee025" (UID: "9a155b21-2448-4b37-a16d-6948f4fee025"). InnerVolumeSpecName "kube-api-access-qt59q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.826165 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.826250 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt59q\" (UniqueName: \"kubernetes.io/projected/9a155b21-2448-4b37-a16d-6948f4fee025-kube-api-access-qt59q\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.826266 4852 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9a155b21-2448-4b37-a16d-6948f4fee025-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:05 crc kubenswrapper[4852]: I1201 20:10:05.826281 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a155b21-2448-4b37-a16d-6948f4fee025-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.627707 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.651712 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw"] Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.655578 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bfc45787b-5d5jw"] Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.897740 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh"] Dec 01 20:10:06 crc kubenswrapper[4852]: E1201 20:10:06.898258 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a155b21-2448-4b37-a16d-6948f4fee025" containerName="route-controller-manager" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.898296 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a155b21-2448-4b37-a16d-6948f4fee025" containerName="route-controller-manager" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.898683 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a155b21-2448-4b37-a16d-6948f4fee025" containerName="route-controller-manager" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.899645 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.902357 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.903810 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.904087 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.904317 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.904448 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.905649 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.909936 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh"] Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.945099 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cf2v\" (UniqueName: \"kubernetes.io/projected/a7673688-5bad-4081-bdf0-a85d7810ccc9-kube-api-access-6cf2v\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.945205 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/a7673688-5bad-4081-bdf0-a85d7810ccc9-client-ca\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.945329 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7673688-5bad-4081-bdf0-a85d7810ccc9-serving-cert\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:06 crc kubenswrapper[4852]: I1201 20:10:06.945504 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7673688-5bad-4081-bdf0-a85d7810ccc9-config\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:07 crc kubenswrapper[4852]: I1201 20:10:07.046712 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a7673688-5bad-4081-bdf0-a85d7810ccc9-client-ca\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:07 crc kubenswrapper[4852]: I1201 20:10:07.046780 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7673688-5bad-4081-bdf0-a85d7810ccc9-serving-cert\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:07 crc kubenswrapper[4852]: I1201 20:10:07.046867 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7673688-5bad-4081-bdf0-a85d7810ccc9-config\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:07 crc kubenswrapper[4852]: I1201 20:10:07.046973 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cf2v\" (UniqueName: \"kubernetes.io/projected/a7673688-5bad-4081-bdf0-a85d7810ccc9-kube-api-access-6cf2v\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:07 crc kubenswrapper[4852]: I1201 20:10:07.048051 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a7673688-5bad-4081-bdf0-a85d7810ccc9-client-ca\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:07 crc kubenswrapper[4852]: I1201 20:10:07.048694 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a7673688-5bad-4081-bdf0-a85d7810ccc9-config\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:07 crc kubenswrapper[4852]: I1201 20:10:07.055441 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7673688-5bad-4081-bdf0-a85d7810ccc9-serving-cert\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:07 crc kubenswrapper[4852]: I1201 20:10:07.071707 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cf2v\" (UniqueName: \"kubernetes.io/projected/a7673688-5bad-4081-bdf0-a85d7810ccc9-kube-api-access-6cf2v\") pod \"route-controller-manager-6fb5ccf9c6-rg2fh\" (UID: \"a7673688-5bad-4081-bdf0-a85d7810ccc9\") " pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:07 crc kubenswrapper[4852]: I1201 20:10:07.231594 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:07 crc kubenswrapper[4852]: I1201 20:10:07.719549 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh"] Dec 01 20:10:08 crc kubenswrapper[4852]: I1201 20:10:08.333683 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a155b21-2448-4b37-a16d-6948f4fee025" path="/var/lib/kubelet/pods/9a155b21-2448-4b37-a16d-6948f4fee025/volumes" Dec 01 20:10:08 crc kubenswrapper[4852]: I1201 20:10:08.647415 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" event={"ID":"a7673688-5bad-4081-bdf0-a85d7810ccc9","Type":"ContainerStarted","Data":"2de4f850998c98fcf2c199923a3a2bc273a17f631d4458ace5e539763cd855e0"} Dec 01 20:10:08 crc kubenswrapper[4852]: I1201 20:10:08.647507 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" event={"ID":"a7673688-5bad-4081-bdf0-a85d7810ccc9","Type":"ContainerStarted","Data":"6ce204b381f15b71dc24733c5dfe27b7d07dd0b2301a72fe6618c49b1c539a62"} Dec 01 20:10:08 crc kubenswrapper[4852]: I1201 20:10:08.647787 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:08 crc kubenswrapper[4852]: I1201 20:10:08.657500 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" Dec 01 20:10:08 crc kubenswrapper[4852]: I1201 20:10:08.690123 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6fb5ccf9c6-rg2fh" podStartSLOduration=3.690085512 podStartE2EDuration="3.690085512s" podCreationTimestamp="2025-12-01 20:10:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:10:08.672718305 +0000 UTC m=+328.599799742" watchObservedRunningTime="2025-12-01 20:10:08.690085512 +0000 UTC m=+328.617166959" Dec 01 
Dec 01 20:10:20 crc kubenswrapper[4852]: I1201 20:10:20.230251 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 20:10:20 crc kubenswrapper[4852]: I1201 20:10:20.231099 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 20:10:30 crc kubenswrapper[4852]: I1201 20:10:30.742557 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-8488df84f9-xq52p"]
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.257404 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6vzc5"]
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.260213 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.263740 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.282680 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6vzc5"]
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.378774 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e5d579f-42bd-43e4-92b0-fba046d9f9a3-utilities\") pod \"redhat-operators-6vzc5\" (UID: \"6e5d579f-42bd-43e4-92b0-fba046d9f9a3\") " pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.378866 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e5d579f-42bd-43e4-92b0-fba046d9f9a3-catalog-content\") pod \"redhat-operators-6vzc5\" (UID: \"6e5d579f-42bd-43e4-92b0-fba046d9f9a3\") " pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.378980 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6xp5\" (UniqueName: \"kubernetes.io/projected/6e5d579f-42bd-43e4-92b0-fba046d9f9a3-kube-api-access-g6xp5\") pod \"redhat-operators-6vzc5\" (UID: \"6e5d579f-42bd-43e4-92b0-fba046d9f9a3\") " pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.480478 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e5d579f-42bd-43e4-92b0-fba046d9f9a3-catalog-content\") pod \"redhat-operators-6vzc5\" (UID: \"6e5d579f-42bd-43e4-92b0-fba046d9f9a3\") " pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.480591 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6xp5\" (UniqueName: \"kubernetes.io/projected/6e5d579f-42bd-43e4-92b0-fba046d9f9a3-kube-api-access-g6xp5\") pod \"redhat-operators-6vzc5\" (UID: \"6e5d579f-42bd-43e4-92b0-fba046d9f9a3\") " pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.480716 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e5d579f-42bd-43e4-92b0-fba046d9f9a3-utilities\") pod \"redhat-operators-6vzc5\" (UID: \"6e5d579f-42bd-43e4-92b0-fba046d9f9a3\") " pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.481354 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e5d579f-42bd-43e4-92b0-fba046d9f9a3-utilities\") pod \"redhat-operators-6vzc5\" (UID: \"6e5d579f-42bd-43e4-92b0-fba046d9f9a3\") " pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.481359 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e5d579f-42bd-43e4-92b0-fba046d9f9a3-catalog-content\") pod \"redhat-operators-6vzc5\" (UID: \"6e5d579f-42bd-43e4-92b0-fba046d9f9a3\") " pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.519319 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6xp5\" (UniqueName: \"kubernetes.io/projected/6e5d579f-42bd-43e4-92b0-fba046d9f9a3-kube-api-access-g6xp5\") pod \"redhat-operators-6vzc5\" (UID: \"6e5d579f-42bd-43e4-92b0-fba046d9f9a3\") " pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:45 crc kubenswrapper[4852]: I1201 20:10:45.593570 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6vzc5"
Dec 01 20:10:46 crc kubenswrapper[4852]: I1201 20:10:46.035299 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6vzc5"]
Dec 01 20:10:46 crc kubenswrapper[4852]: I1201 20:10:46.940870 4852 generic.go:334] "Generic (PLEG): container finished" podID="6e5d579f-42bd-43e4-92b0-fba046d9f9a3" containerID="dc56115a3ecb05012d0777f421a29595dcba1d9f76846088f925c7200387de9b" exitCode=0
Dec 01 20:10:46 crc kubenswrapper[4852]: I1201 20:10:46.940953 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vzc5" event={"ID":"6e5d579f-42bd-43e4-92b0-fba046d9f9a3","Type":"ContainerDied","Data":"dc56115a3ecb05012d0777f421a29595dcba1d9f76846088f925c7200387de9b"}
Dec 01 20:10:46 crc kubenswrapper[4852]: I1201 20:10:46.941359 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vzc5" event={"ID":"6e5d579f-42bd-43e4-92b0-fba046d9f9a3","Type":"ContainerStarted","Data":"c3f31546c61999991bb8736a5c080ff7fef489096764c746b31a05e1eb7ca5db"}
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.056802 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h6p7j"]
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.059596 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.061803 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.072370 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h6p7j"]
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.104877 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdfa42a5-ee04-4343-a772-bacb19117993-utilities\") pod \"community-operators-h6p7j\" (UID: \"bdfa42a5-ee04-4343-a772-bacb19117993\") " pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.104957 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87xdc\" (UniqueName: \"kubernetes.io/projected/bdfa42a5-ee04-4343-a772-bacb19117993-kube-api-access-87xdc\") pod \"community-operators-h6p7j\" (UID: \"bdfa42a5-ee04-4343-a772-bacb19117993\") " pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.105148 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdfa42a5-ee04-4343-a772-bacb19117993-catalog-content\") pod \"community-operators-h6p7j\" (UID: \"bdfa42a5-ee04-4343-a772-bacb19117993\") " pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.206411 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87xdc\" (UniqueName: \"kubernetes.io/projected/bdfa42a5-ee04-4343-a772-bacb19117993-kube-api-access-87xdc\") pod \"community-operators-h6p7j\" (UID: \"bdfa42a5-ee04-4343-a772-bacb19117993\") " pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.206532 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdfa42a5-ee04-4343-a772-bacb19117993-catalog-content\") pod \"community-operators-h6p7j\" (UID: \"bdfa42a5-ee04-4343-a772-bacb19117993\") " pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.206591 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdfa42a5-ee04-4343-a772-bacb19117993-utilities\") pod \"community-operators-h6p7j\" (UID: \"bdfa42a5-ee04-4343-a772-bacb19117993\") " pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.207173 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdfa42a5-ee04-4343-a772-bacb19117993-utilities\") pod \"community-operators-h6p7j\" (UID: \"bdfa42a5-ee04-4343-a772-bacb19117993\") " pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.207833 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdfa42a5-ee04-4343-a772-bacb19117993-catalog-content\") pod \"community-operators-h6p7j\" (UID: \"bdfa42a5-ee04-4343-a772-bacb19117993\") " pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.230583 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87xdc\" (UniqueName: \"kubernetes.io/projected/bdfa42a5-ee04-4343-a772-bacb19117993-kube-api-access-87xdc\") pod \"community-operators-h6p7j\" (UID: \"bdfa42a5-ee04-4343-a772-bacb19117993\") " pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.378082 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h6p7j"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.658848 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sqdrn"]
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.660570 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sqdrn"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.666948 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.669099 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sqdrn"]
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.714873 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f798cf97-cdb1-43d0-b586-8cfa4fbb71ed-catalog-content\") pod \"certified-operators-sqdrn\" (UID: \"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed\") " pod="openshift-marketplace/certified-operators-sqdrn"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.714961 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj7rn\" (UniqueName: \"kubernetes.io/projected/f798cf97-cdb1-43d0-b586-8cfa4fbb71ed-kube-api-access-hj7rn\") pod \"certified-operators-sqdrn\" (UID: \"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed\") " pod="openshift-marketplace/certified-operators-sqdrn"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.715023 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f798cf97-cdb1-43d0-b586-8cfa4fbb71ed-utilities\") pod \"certified-operators-sqdrn\" (UID: \"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed\") " pod="openshift-marketplace/certified-operators-sqdrn"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.809835 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h6p7j"]
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.816538 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f798cf97-cdb1-43d0-b586-8cfa4fbb71ed-catalog-content\") pod \"certified-operators-sqdrn\" (UID: \"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed\") " pod="openshift-marketplace/certified-operators-sqdrn"
Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.816685 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj7rn\" (UniqueName: \"kubernetes.io/projected/f798cf97-cdb1-43d0-b586-8cfa4fbb71ed-kube-api-access-hj7rn\") pod \"certified-operators-sqdrn\" (UID: \"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed\") " pod="openshift-marketplace/certified-operators-sqdrn"
\"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed\") " pod="openshift-marketplace/certified-operators-sqdrn" Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.816741 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f798cf97-cdb1-43d0-b586-8cfa4fbb71ed-utilities\") pod \"certified-operators-sqdrn\" (UID: \"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed\") " pod="openshift-marketplace/certified-operators-sqdrn" Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.817234 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f798cf97-cdb1-43d0-b586-8cfa4fbb71ed-catalog-content\") pod \"certified-operators-sqdrn\" (UID: \"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed\") " pod="openshift-marketplace/certified-operators-sqdrn" Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.817405 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f798cf97-cdb1-43d0-b586-8cfa4fbb71ed-utilities\") pod \"certified-operators-sqdrn\" (UID: \"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed\") " pod="openshift-marketplace/certified-operators-sqdrn" Dec 01 20:10:47 crc kubenswrapper[4852]: W1201 20:10:47.823954 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbdfa42a5_ee04_4343_a772_bacb19117993.slice/crio-7b2217395eb2d1e3cefdb5e15b3ae82feb4ee54572088c63f4b1acdd69c95598 WatchSource:0}: Error finding container 7b2217395eb2d1e3cefdb5e15b3ae82feb4ee54572088c63f4b1acdd69c95598: Status 404 returned error can't find the container with id 7b2217395eb2d1e3cefdb5e15b3ae82feb4ee54572088c63f4b1acdd69c95598 Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.842774 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj7rn\" (UniqueName: \"kubernetes.io/projected/f798cf97-cdb1-43d0-b586-8cfa4fbb71ed-kube-api-access-hj7rn\") pod \"certified-operators-sqdrn\" (UID: \"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed\") " pod="openshift-marketplace/certified-operators-sqdrn" Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.950008 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6p7j" event={"ID":"bdfa42a5-ee04-4343-a772-bacb19117993","Type":"ContainerStarted","Data":"7b2217395eb2d1e3cefdb5e15b3ae82feb4ee54572088c63f4b1acdd69c95598"} Dec 01 20:10:47 crc kubenswrapper[4852]: I1201 20:10:47.981795 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sqdrn" Dec 01 20:10:48 crc kubenswrapper[4852]: I1201 20:10:48.396002 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sqdrn"] Dec 01 20:10:48 crc kubenswrapper[4852]: I1201 20:10:48.959994 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqdrn" event={"ID":"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed","Type":"ContainerStarted","Data":"bc500d664ed20308a79141e4cd20bb5b5b5bce4fb49a40f9962b7cb69e514a07"} Dec 01 20:10:48 crc kubenswrapper[4852]: I1201 20:10:48.961929 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6p7j" event={"ID":"bdfa42a5-ee04-4343-a772-bacb19117993","Type":"ContainerStarted","Data":"5353d9d5ea55a11d5b9c467d6dec6512d5062c677841541184c9b07c35688945"} Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.452507 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xcpbh"] Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.453961 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.457210 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.471307 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcpbh"] Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.559402 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fb8eb65-b853-4f81-9650-abde242b8300-catalog-content\") pod \"redhat-marketplace-xcpbh\" (UID: \"2fb8eb65-b853-4f81-9650-abde242b8300\") " pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.559480 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sds6g\" (UniqueName: \"kubernetes.io/projected/2fb8eb65-b853-4f81-9650-abde242b8300-kube-api-access-sds6g\") pod \"redhat-marketplace-xcpbh\" (UID: \"2fb8eb65-b853-4f81-9650-abde242b8300\") " pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.559907 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fb8eb65-b853-4f81-9650-abde242b8300-utilities\") pod \"redhat-marketplace-xcpbh\" (UID: \"2fb8eb65-b853-4f81-9650-abde242b8300\") " pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.661688 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fb8eb65-b853-4f81-9650-abde242b8300-utilities\") pod \"redhat-marketplace-xcpbh\" (UID: \"2fb8eb65-b853-4f81-9650-abde242b8300\") " pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.661777 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fb8eb65-b853-4f81-9650-abde242b8300-catalog-content\") pod \"redhat-marketplace-xcpbh\" (UID: 
\"2fb8eb65-b853-4f81-9650-abde242b8300\") " pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.661820 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sds6g\" (UniqueName: \"kubernetes.io/projected/2fb8eb65-b853-4f81-9650-abde242b8300-kube-api-access-sds6g\") pod \"redhat-marketplace-xcpbh\" (UID: \"2fb8eb65-b853-4f81-9650-abde242b8300\") " pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.662346 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fb8eb65-b853-4f81-9650-abde242b8300-catalog-content\") pod \"redhat-marketplace-xcpbh\" (UID: \"2fb8eb65-b853-4f81-9650-abde242b8300\") " pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.662387 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fb8eb65-b853-4f81-9650-abde242b8300-utilities\") pod \"redhat-marketplace-xcpbh\" (UID: \"2fb8eb65-b853-4f81-9650-abde242b8300\") " pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.684832 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sds6g\" (UniqueName: \"kubernetes.io/projected/2fb8eb65-b853-4f81-9650-abde242b8300-kube-api-access-sds6g\") pod \"redhat-marketplace-xcpbh\" (UID: \"2fb8eb65-b853-4f81-9650-abde242b8300\") " pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.770624 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.982363 4852 generic.go:334] "Generic (PLEG): container finished" podID="6e5d579f-42bd-43e4-92b0-fba046d9f9a3" containerID="2aa93daa1313d162ab6c5bc362a483921ebdb8dc65d58ab55596befd11c28610" exitCode=0 Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.982685 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vzc5" event={"ID":"6e5d579f-42bd-43e4-92b0-fba046d9f9a3","Type":"ContainerDied","Data":"2aa93daa1313d162ab6c5bc362a483921ebdb8dc65d58ab55596befd11c28610"} Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.989297 4852 generic.go:334] "Generic (PLEG): container finished" podID="bdfa42a5-ee04-4343-a772-bacb19117993" containerID="5353d9d5ea55a11d5b9c467d6dec6512d5062c677841541184c9b07c35688945" exitCode=0 Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.989399 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6p7j" event={"ID":"bdfa42a5-ee04-4343-a772-bacb19117993","Type":"ContainerDied","Data":"5353d9d5ea55a11d5b9c467d6dec6512d5062c677841541184c9b07c35688945"} Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.992922 4852 generic.go:334] "Generic (PLEG): container finished" podID="f798cf97-cdb1-43d0-b586-8cfa4fbb71ed" containerID="8a58da176a071ec39452ae83a2d29d06c8f4c07d1d856d67985b90499f6c67d4" exitCode=0 Dec 01 20:10:49 crc kubenswrapper[4852]: I1201 20:10:49.993047 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqdrn" event={"ID":"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed","Type":"ContainerDied","Data":"8a58da176a071ec39452ae83a2d29d06c8f4c07d1d856d67985b90499f6c67d4"} Dec 01 20:10:50 crc kubenswrapper[4852]: I1201 20:10:50.018421 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcpbh"] Dec 01 20:10:50 crc kubenswrapper[4852]: W1201 20:10:50.052673 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fb8eb65_b853_4f81_9650_abde242b8300.slice/crio-2dd2d53c9b6f47ed2549f86226d204b789b6e20d40a3907fa43d12937d9189ce WatchSource:0}: Error finding container 2dd2d53c9b6f47ed2549f86226d204b789b6e20d40a3907fa43d12937d9189ce: Status 404 returned error can't find the container with id 2dd2d53c9b6f47ed2549f86226d204b789b6e20d40a3907fa43d12937d9189ce Dec 01 20:10:50 crc kubenswrapper[4852]: I1201 20:10:50.230090 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:10:50 crc kubenswrapper[4852]: I1201 20:10:50.230187 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:10:51 crc kubenswrapper[4852]: I1201 20:10:51.003502 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vzc5" 
event={"ID":"6e5d579f-42bd-43e4-92b0-fba046d9f9a3","Type":"ContainerStarted","Data":"83e360de588b4eb288ee4337c839e25013d9ebdd2cf8ba520df29071b7312375"} Dec 01 20:10:51 crc kubenswrapper[4852]: I1201 20:10:51.005759 4852 generic.go:334] "Generic (PLEG): container finished" podID="2fb8eb65-b853-4f81-9650-abde242b8300" containerID="a246efffb1988204b05f4fbae9c3a59585ff7e9cd6cca880fd7ecd1793ce696b" exitCode=0 Dec 01 20:10:51 crc kubenswrapper[4852]: I1201 20:10:51.005796 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcpbh" event={"ID":"2fb8eb65-b853-4f81-9650-abde242b8300","Type":"ContainerDied","Data":"a246efffb1988204b05f4fbae9c3a59585ff7e9cd6cca880fd7ecd1793ce696b"} Dec 01 20:10:51 crc kubenswrapper[4852]: I1201 20:10:51.005829 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcpbh" event={"ID":"2fb8eb65-b853-4f81-9650-abde242b8300","Type":"ContainerStarted","Data":"2dd2d53c9b6f47ed2549f86226d204b789b6e20d40a3907fa43d12937d9189ce"} Dec 01 20:10:52 crc kubenswrapper[4852]: I1201 20:10:52.020537 4852 generic.go:334] "Generic (PLEG): container finished" podID="bdfa42a5-ee04-4343-a772-bacb19117993" containerID="50c89dd30f253d8c99dcb7a71a6d117511b53999f288052c14d96117a9f21ce5" exitCode=0 Dec 01 20:10:52 crc kubenswrapper[4852]: I1201 20:10:52.020665 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6p7j" event={"ID":"bdfa42a5-ee04-4343-a772-bacb19117993","Type":"ContainerDied","Data":"50c89dd30f253d8c99dcb7a71a6d117511b53999f288052c14d96117a9f21ce5"} Dec 01 20:10:52 crc kubenswrapper[4852]: I1201 20:10:52.023446 4852 generic.go:334] "Generic (PLEG): container finished" podID="f798cf97-cdb1-43d0-b586-8cfa4fbb71ed" containerID="5413662667c28f366a7111c2a163f08017f6461585dc36d381c2ba80a3202899" exitCode=0 Dec 01 20:10:52 crc kubenswrapper[4852]: I1201 20:10:52.023740 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqdrn" event={"ID":"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed","Type":"ContainerDied","Data":"5413662667c28f366a7111c2a163f08017f6461585dc36d381c2ba80a3202899"} Dec 01 20:10:52 crc kubenswrapper[4852]: I1201 20:10:52.080585 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6vzc5" podStartSLOduration=3.234037696 podStartE2EDuration="7.080548702s" podCreationTimestamp="2025-12-01 20:10:45 +0000 UTC" firstStartedPulling="2025-12-01 20:10:46.945333965 +0000 UTC m=+366.872415422" lastFinishedPulling="2025-12-01 20:10:50.791844991 +0000 UTC m=+370.718926428" observedRunningTime="2025-12-01 20:10:52.079027974 +0000 UTC m=+372.006109401" watchObservedRunningTime="2025-12-01 20:10:52.080548702 +0000 UTC m=+372.007630159" Dec 01 20:10:53 crc kubenswrapper[4852]: I1201 20:10:53.033533 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6p7j" event={"ID":"bdfa42a5-ee04-4343-a772-bacb19117993","Type":"ContainerStarted","Data":"eb5659a056fcef81fb11942adb0285aaff6e89d54028cc448ed8b2a8c138baaf"} Dec 01 20:10:53 crc kubenswrapper[4852]: I1201 20:10:53.035777 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqdrn" event={"ID":"f798cf97-cdb1-43d0-b586-8cfa4fbb71ed","Type":"ContainerStarted","Data":"922964c3ab3fa7ebd81401e6ed06db0dc98a8935404ac6823926f1ff670c0591"} Dec 01 20:10:53 crc kubenswrapper[4852]: I1201 20:10:53.038339 4852 
generic.go:334] "Generic (PLEG): container finished" podID="2fb8eb65-b853-4f81-9650-abde242b8300" containerID="46356e66d5630cec2a25474ffc994b5f4a4313c1838cdbf95f46f151ef30eef8" exitCode=0 Dec 01 20:10:53 crc kubenswrapper[4852]: I1201 20:10:53.038402 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcpbh" event={"ID":"2fb8eb65-b853-4f81-9650-abde242b8300","Type":"ContainerDied","Data":"46356e66d5630cec2a25474ffc994b5f4a4313c1838cdbf95f46f151ef30eef8"} Dec 01 20:10:53 crc kubenswrapper[4852]: I1201 20:10:53.060018 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h6p7j" podStartSLOduration=3.289467902 podStartE2EDuration="6.059991418s" podCreationTimestamp="2025-12-01 20:10:47 +0000 UTC" firstStartedPulling="2025-12-01 20:10:49.992239812 +0000 UTC m=+369.919321229" lastFinishedPulling="2025-12-01 20:10:52.762763328 +0000 UTC m=+372.689844745" observedRunningTime="2025-12-01 20:10:53.056786156 +0000 UTC m=+372.983867573" watchObservedRunningTime="2025-12-01 20:10:53.059991418 +0000 UTC m=+372.987072835" Dec 01 20:10:53 crc kubenswrapper[4852]: I1201 20:10:53.103180 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sqdrn" podStartSLOduration=3.416836307 podStartE2EDuration="6.103158202s" podCreationTimestamp="2025-12-01 20:10:47 +0000 UTC" firstStartedPulling="2025-12-01 20:10:49.993884005 +0000 UTC m=+369.920965422" lastFinishedPulling="2025-12-01 20:10:52.6802059 +0000 UTC m=+372.607287317" observedRunningTime="2025-12-01 20:10:53.09997142 +0000 UTC m=+373.027052837" watchObservedRunningTime="2025-12-01 20:10:53.103158202 +0000 UTC m=+373.030239619" Dec 01 20:10:54 crc kubenswrapper[4852]: I1201 20:10:54.047553 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcpbh" event={"ID":"2fb8eb65-b853-4f81-9650-abde242b8300","Type":"ContainerStarted","Data":"3c0d2ef6fecb6589d9ab787e1e3dc026b8708b6650b38cabb1a257f7eec66639"} Dec 01 20:10:54 crc kubenswrapper[4852]: I1201 20:10:54.078160 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xcpbh" podStartSLOduration=2.562776321 podStartE2EDuration="5.078134694s" podCreationTimestamp="2025-12-01 20:10:49 +0000 UTC" firstStartedPulling="2025-12-01 20:10:51.007407163 +0000 UTC m=+370.934488580" lastFinishedPulling="2025-12-01 20:10:53.522765536 +0000 UTC m=+373.449846953" observedRunningTime="2025-12-01 20:10:54.074779077 +0000 UTC m=+374.001860494" watchObservedRunningTime="2025-12-01 20:10:54.078134694 +0000 UTC m=+374.005216111" Dec 01 20:10:55 crc kubenswrapper[4852]: I1201 20:10:55.593991 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6vzc5" Dec 01 20:10:55 crc kubenswrapper[4852]: I1201 20:10:55.594081 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6vzc5" Dec 01 20:10:55 crc kubenswrapper[4852]: I1201 20:10:55.775264 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" podUID="8960bbfc-ef97-4fb2-88e1-50d030de34a2" containerName="oauth-openshift" containerID="cri-o://e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1" gracePeriod=15 Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.655527 4852 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6vzc5" podUID="6e5d579f-42bd-43e4-92b0-fba046d9f9a3" containerName="registry-server" probeResult="failure" output=< Dec 01 20:10:56 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s Dec 01 20:10:56 crc kubenswrapper[4852]: > Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.806998 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.850681 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7986744d77-442fn"] Dec 01 20:10:56 crc kubenswrapper[4852]: E1201 20:10:56.851079 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8960bbfc-ef97-4fb2-88e1-50d030de34a2" containerName="oauth-openshift" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.851095 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8960bbfc-ef97-4fb2-88e1-50d030de34a2" containerName="oauth-openshift" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.851225 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8960bbfc-ef97-4fb2-88e1-50d030de34a2" containerName="oauth-openshift" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.851938 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.868931 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7986744d77-442fn"] Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.885662 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-provider-selection\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.885736 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-trusted-ca-bundle\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.885786 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-login\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.885829 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-session\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.885857 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-cliconfig\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.886689 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-dir\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.886768 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.887266 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-ocp-branding-template\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.887255 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.887066 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.887925 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-router-certs\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.887990 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crq6k\" (UniqueName: \"kubernetes.io/projected/8960bbfc-ef97-4fb2-88e1-50d030de34a2-kube-api-access-crq6k\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.888033 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-policies\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.888069 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-idp-0-file-data\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.888110 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-error\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.888149 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-serving-cert\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.888173 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-service-ca\") pod \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\" (UID: \"8960bbfc-ef97-4fb2-88e1-50d030de34a2\") " Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.888886 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.888915 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.888930 4852 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-dir\") on node \"crc\" 
DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.889092 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.889655 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.893713 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.893853 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.894444 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.894729 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.894804 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.896308 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.897374 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.898311 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8960bbfc-ef97-4fb2-88e1-50d030de34a2-kube-api-access-crq6k" (OuterVolumeSpecName: "kube-api-access-crq6k") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "kube-api-access-crq6k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.900965 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "8960bbfc-ef97-4fb2-88e1-50d030de34a2" (UID: "8960bbfc-ef97-4fb2-88e1-50d030de34a2"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.990343 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.990425 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-template-login\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.990470 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wb8nk\" (UniqueName: \"kubernetes.io/projected/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-kube-api-access-wb8nk\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.990501 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.990695 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.990965 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-audit-policies\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.991040 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-router-certs\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.991438 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.991752 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.991819 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-template-error\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.991935 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-audit-dir\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992080 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-service-ca\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992163 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-session\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992199 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992344 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992364 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992382 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992400 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992418 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crq6k\" (UniqueName: \"kubernetes.io/projected/8960bbfc-ef97-4fb2-88e1-50d030de34a2-kube-api-access-crq6k\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992432 4852 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992446 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992477 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992492 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992505 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:56 crc kubenswrapper[4852]: I1201 20:10:56.992520 4852 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8960bbfc-ef97-4fb2-88e1-50d030de34a2-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.072207 4852 generic.go:334] "Generic (PLEG): container finished" podID="8960bbfc-ef97-4fb2-88e1-50d030de34a2" containerID="e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1" exitCode=0 Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.072277 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" event={"ID":"8960bbfc-ef97-4fb2-88e1-50d030de34a2","Type":"ContainerDied","Data":"e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1"} Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.072329 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" event={"ID":"8960bbfc-ef97-4fb2-88e1-50d030de34a2","Type":"ContainerDied","Data":"144af989d1b1486f6f411dd22cd870db105717fa8316759e08bf72138d262ac7"} Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.072360 4852 scope.go:117] "RemoveContainer" containerID="e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.072912 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-8488df84f9-xq52p" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.093573 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-audit-dir\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094006 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-service-ca\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094053 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-session\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094071 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094137 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094163 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-template-login\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094190 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wb8nk\" (UniqueName: \"kubernetes.io/projected/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-kube-api-access-wb8nk\") pod \"oauth-openshift-7986744d77-442fn\" (UID: 
\"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094224 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094257 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094282 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-audit-policies\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094308 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-router-certs\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094339 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094370 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.094399 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-template-error\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.093675 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-audit-dir\") pod \"oauth-openshift-7986744d77-442fn\" (UID: 
\"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.096397 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.096490 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.096799 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-audit-policies\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.096865 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-service-ca\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.101323 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-template-error\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.101858 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.102043 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-session\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.102083 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " 
pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.102407 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-router-certs\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.102565 4852 scope.go:117] "RemoveContainer" containerID="e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.103581 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-template-login\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: E1201 20:10:57.104118 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1\": container with ID starting with e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1 not found: ID does not exist" containerID="e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.104161 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1"} err="failed to get container status \"e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1\": rpc error: code = NotFound desc = could not find container \"e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1\": container with ID starting with e35c70ca67af3d8782fdd5afdb749ce28cda3463ae3d7dca4c92da87d6503bd1 not found: ID does not exist" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.104343 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.114274 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7986744d77-442fn\" (UID: \"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.117345 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-8488df84f9-xq52p"] Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.118006 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wb8nk\" (UniqueName: \"kubernetes.io/projected/85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7-kube-api-access-wb8nk\") pod \"oauth-openshift-7986744d77-442fn\" (UID: 
\"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7\") " pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.120705 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-8488df84f9-xq52p"] Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.173104 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.378274 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h6p7j" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.378373 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h6p7j" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.421675 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h6p7j" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.615303 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7986744d77-442fn"] Dec 01 20:10:57 crc kubenswrapper[4852]: W1201 20:10:57.628974 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85c1a9bc_2d78_42c0_a1f8_4bd3bbbf32f7.slice/crio-9014539b84e3fa1036b323e81bbdba97f8463b4ea23a1dcc7d21ce0451a78eae WatchSource:0}: Error finding container 9014539b84e3fa1036b323e81bbdba97f8463b4ea23a1dcc7d21ce0451a78eae: Status 404 returned error can't find the container with id 9014539b84e3fa1036b323e81bbdba97f8463b4ea23a1dcc7d21ce0451a78eae Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.982041 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sqdrn" Dec 01 20:10:57 crc kubenswrapper[4852]: I1201 20:10:57.982581 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sqdrn" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.040986 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sqdrn" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.080834 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7986744d77-442fn" event={"ID":"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7","Type":"ContainerStarted","Data":"9014539b84e3fa1036b323e81bbdba97f8463b4ea23a1dcc7d21ce0451a78eae"} Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.129624 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sqdrn" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.130112 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h6p7j" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.334360 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8960bbfc-ef97-4fb2-88e1-50d030de34a2" path="/var/lib/kubelet/pods/8960bbfc-ef97-4fb2-88e1-50d030de34a2/volumes" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.664764 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-j82zs"] Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 
20:10:58.666130 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.692171 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-j82zs"] Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.723041 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76rkp\" (UniqueName: \"kubernetes.io/projected/cd2e01af-4f59-4a85-a9da-3952cc28f86f-kube-api-access-76rkp\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.723122 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.723152 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cd2e01af-4f59-4a85-a9da-3952cc28f86f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.723177 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cd2e01af-4f59-4a85-a9da-3952cc28f86f-registry-certificates\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.723201 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cd2e01af-4f59-4a85-a9da-3952cc28f86f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.723226 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cd2e01af-4f59-4a85-a9da-3952cc28f86f-bound-sa-token\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.723247 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cd2e01af-4f59-4a85-a9da-3952cc28f86f-trusted-ca\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.723288 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cd2e01af-4f59-4a85-a9da-3952cc28f86f-registry-tls\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.751161 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.825003 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76rkp\" (UniqueName: \"kubernetes.io/projected/cd2e01af-4f59-4a85-a9da-3952cc28f86f-kube-api-access-76rkp\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.825143 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cd2e01af-4f59-4a85-a9da-3952cc28f86f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.825196 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cd2e01af-4f59-4a85-a9da-3952cc28f86f-registry-certificates\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.825235 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cd2e01af-4f59-4a85-a9da-3952cc28f86f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.825278 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cd2e01af-4f59-4a85-a9da-3952cc28f86f-bound-sa-token\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.825317 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cd2e01af-4f59-4a85-a9da-3952cc28f86f-trusted-ca\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.825390 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cd2e01af-4f59-4a85-a9da-3952cc28f86f-registry-tls\") pod \"image-registry-66df7c8f76-j82zs\" (UID: 
\"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.826246 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cd2e01af-4f59-4a85-a9da-3952cc28f86f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.827326 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cd2e01af-4f59-4a85-a9da-3952cc28f86f-registry-certificates\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.827415 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cd2e01af-4f59-4a85-a9da-3952cc28f86f-trusted-ca\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.834532 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cd2e01af-4f59-4a85-a9da-3952cc28f86f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.835521 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cd2e01af-4f59-4a85-a9da-3952cc28f86f-registry-tls\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.842975 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76rkp\" (UniqueName: \"kubernetes.io/projected/cd2e01af-4f59-4a85-a9da-3952cc28f86f-kube-api-access-76rkp\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.849144 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cd2e01af-4f59-4a85-a9da-3952cc28f86f-bound-sa-token\") pod \"image-registry-66df7c8f76-j82zs\" (UID: \"cd2e01af-4f59-4a85-a9da-3952cc28f86f\") " pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:58 crc kubenswrapper[4852]: I1201 20:10:58.984822 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:10:59 crc kubenswrapper[4852]: I1201 20:10:59.093315 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7986744d77-442fn" event={"ID":"85c1a9bc-2d78-42c0-a1f8-4bd3bbbf32f7","Type":"ContainerStarted","Data":"c73c570a8f148cf1ac412fdb0832a34be3ce274a111fe3117bbf10b55644d284"} Dec 01 20:10:59 crc kubenswrapper[4852]: I1201 20:10:59.094098 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:59 crc kubenswrapper[4852]: I1201 20:10:59.105905 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7986744d77-442fn" Dec 01 20:10:59 crc kubenswrapper[4852]: I1201 20:10:59.156502 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7986744d77-442fn" podStartSLOduration=29.156430967 podStartE2EDuration="29.156430967s" podCreationTimestamp="2025-12-01 20:10:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:10:59.125431173 +0000 UTC m=+379.052512600" watchObservedRunningTime="2025-12-01 20:10:59.156430967 +0000 UTC m=+379.083512394" Dec 01 20:10:59 crc kubenswrapper[4852]: I1201 20:10:59.427725 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-j82zs"] Dec 01 20:10:59 crc kubenswrapper[4852]: I1201 20:10:59.770928 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:59 crc kubenswrapper[4852]: I1201 20:10:59.771623 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:10:59 crc kubenswrapper[4852]: I1201 20:10:59.822987 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:11:00 crc kubenswrapper[4852]: I1201 20:11:00.104438 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" event={"ID":"cd2e01af-4f59-4a85-a9da-3952cc28f86f","Type":"ContainerStarted","Data":"40a6d3a59be8d091d6d0a55af32b2cc5e39a8ff9d03eaa4bdc7c335eaf66485b"} Dec 01 20:11:00 crc kubenswrapper[4852]: I1201 20:11:00.104521 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" event={"ID":"cd2e01af-4f59-4a85-a9da-3952cc28f86f","Type":"ContainerStarted","Data":"3ddb193d60b528493cca3022aaa493d1784576e124156c6f4ccda1625c66d58f"} Dec 01 20:11:00 crc kubenswrapper[4852]: I1201 20:11:00.105755 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:11:00 crc kubenswrapper[4852]: I1201 20:11:00.131990 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" podStartSLOduration=2.131962006 podStartE2EDuration="2.131962006s" podCreationTimestamp="2025-12-01 20:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:11:00.127950728 +0000 UTC m=+380.055032145" 
watchObservedRunningTime="2025-12-01 20:11:00.131962006 +0000 UTC m=+380.059043433" Dec 01 20:11:00 crc kubenswrapper[4852]: I1201 20:11:00.154019 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xcpbh" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.214987 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5f75b5947d-k4hl8"] Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.215995 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" podUID="cd568b48-d746-4b52-9ea9-99f7e80399b7" containerName="controller-manager" containerID="cri-o://a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f" gracePeriod=30 Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.640444 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6vzc5" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.652333 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.692129 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6vzc5" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.744293 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-client-ca\") pod \"cd568b48-d746-4b52-9ea9-99f7e80399b7\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.744819 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-proxy-ca-bundles\") pod \"cd568b48-d746-4b52-9ea9-99f7e80399b7\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.744870 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpltj\" (UniqueName: \"kubernetes.io/projected/cd568b48-d746-4b52-9ea9-99f7e80399b7-kube-api-access-zpltj\") pod \"cd568b48-d746-4b52-9ea9-99f7e80399b7\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.744912 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd568b48-d746-4b52-9ea9-99f7e80399b7-serving-cert\") pod \"cd568b48-d746-4b52-9ea9-99f7e80399b7\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.744931 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-config\") pod \"cd568b48-d746-4b52-9ea9-99f7e80399b7\" (UID: \"cd568b48-d746-4b52-9ea9-99f7e80399b7\") " Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.745293 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-client-ca" (OuterVolumeSpecName: "client-ca") pod "cd568b48-d746-4b52-9ea9-99f7e80399b7" (UID: "cd568b48-d746-4b52-9ea9-99f7e80399b7"). 
InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.745504 4852 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.745490 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "cd568b48-d746-4b52-9ea9-99f7e80399b7" (UID: "cd568b48-d746-4b52-9ea9-99f7e80399b7"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.746172 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-config" (OuterVolumeSpecName: "config") pod "cd568b48-d746-4b52-9ea9-99f7e80399b7" (UID: "cd568b48-d746-4b52-9ea9-99f7e80399b7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.752338 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd568b48-d746-4b52-9ea9-99f7e80399b7-kube-api-access-zpltj" (OuterVolumeSpecName: "kube-api-access-zpltj") pod "cd568b48-d746-4b52-9ea9-99f7e80399b7" (UID: "cd568b48-d746-4b52-9ea9-99f7e80399b7"). InnerVolumeSpecName "kube-api-access-zpltj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.755186 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd568b48-d746-4b52-9ea9-99f7e80399b7-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cd568b48-d746-4b52-9ea9-99f7e80399b7" (UID: "cd568b48-d746-4b52-9ea9-99f7e80399b7"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.846987 4852 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.847044 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpltj\" (UniqueName: \"kubernetes.io/projected/cd568b48-d746-4b52-9ea9-99f7e80399b7-kube-api-access-zpltj\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.847057 4852 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd568b48-d746-4b52-9ea9-99f7e80399b7-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:05 crc kubenswrapper[4852]: I1201 20:11:05.847093 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd568b48-d746-4b52-9ea9-99f7e80399b7-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.148742 4852 generic.go:334] "Generic (PLEG): container finished" podID="cd568b48-d746-4b52-9ea9-99f7e80399b7" containerID="a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f" exitCode=0 Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.148852 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.148945 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" event={"ID":"cd568b48-d746-4b52-9ea9-99f7e80399b7","Type":"ContainerDied","Data":"a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f"} Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.148988 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f75b5947d-k4hl8" event={"ID":"cd568b48-d746-4b52-9ea9-99f7e80399b7","Type":"ContainerDied","Data":"fca48b1b7187ebf46a7bdd47f75b17f21a8f6cb5f6f24bbe276e4008dbd82179"} Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.149014 4852 scope.go:117] "RemoveContainer" containerID="a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.170145 4852 scope.go:117] "RemoveContainer" containerID="a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f" Dec 01 20:11:06 crc kubenswrapper[4852]: E1201 20:11:06.170796 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f\": container with ID starting with a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f not found: ID does not exist" containerID="a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.170885 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f"} err="failed to get container status \"a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f\": rpc error: code = NotFound desc = could not find container \"a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f\": container with ID starting with a522d868d6514f7611f837b159d63c36e1f1f2aedd2156b5895a60889b49f08f not found: ID does not exist" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.196876 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5f75b5947d-k4hl8"] Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.201997 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5f75b5947d-k4hl8"] Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.331644 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd568b48-d746-4b52-9ea9-99f7e80399b7" path="/var/lib/kubelet/pods/cd568b48-d746-4b52-9ea9-99f7e80399b7/volumes" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.942615 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-58b8558775-r6ldq"] Dec 01 20:11:06 crc kubenswrapper[4852]: E1201 20:11:06.943038 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd568b48-d746-4b52-9ea9-99f7e80399b7" containerName="controller-manager" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.943066 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd568b48-d746-4b52-9ea9-99f7e80399b7" containerName="controller-manager" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.943295 4852 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="cd568b48-d746-4b52-9ea9-99f7e80399b7" containerName="controller-manager" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.944288 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.948332 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.949406 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.949847 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.950234 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.951130 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.952141 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.962575 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58b8558775-r6ldq"] Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.962777 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.971447 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-serving-cert\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.971543 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-proxy-ca-bundles\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.971610 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-config\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:06 crc kubenswrapper[4852]: I1201 20:11:06.971704 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnlqd\" (UniqueName: \"kubernetes.io/projected/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-kube-api-access-xnlqd\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:06 
crc kubenswrapper[4852]: I1201 20:11:06.971779 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-client-ca\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.073268 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-client-ca\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.073420 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-serving-cert\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.073493 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-proxy-ca-bundles\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.073569 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-config\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.073641 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnlqd\" (UniqueName: \"kubernetes.io/projected/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-kube-api-access-xnlqd\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.077016 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-client-ca\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.077132 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-proxy-ca-bundles\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.077725 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-config\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.082329 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-serving-cert\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.097230 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnlqd\" (UniqueName: \"kubernetes.io/projected/cdfd3d83-01f4-4779-85d9-2fb455a9ebc2-kube-api-access-xnlqd\") pod \"controller-manager-58b8558775-r6ldq\" (UID: \"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2\") " pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.280382 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:07 crc kubenswrapper[4852]: I1201 20:11:07.548532 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58b8558775-r6ldq"] Dec 01 20:11:07 crc kubenswrapper[4852]: W1201 20:11:07.556061 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcdfd3d83_01f4_4779_85d9_2fb455a9ebc2.slice/crio-4a7e2199b408489fdd8c06aba1bfbec31b376880bbc3be93513106e17ee3a608 WatchSource:0}: Error finding container 4a7e2199b408489fdd8c06aba1bfbec31b376880bbc3be93513106e17ee3a608: Status 404 returned error can't find the container with id 4a7e2199b408489fdd8c06aba1bfbec31b376880bbc3be93513106e17ee3a608 Dec 01 20:11:08 crc kubenswrapper[4852]: I1201 20:11:08.173327 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" event={"ID":"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2","Type":"ContainerStarted","Data":"d3fd70c5b19635417a5b80412df746e0f9dd2f30d7a8722866be8a9d71b03eaf"} Dec 01 20:11:08 crc kubenswrapper[4852]: I1201 20:11:08.173682 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" event={"ID":"cdfd3d83-01f4-4779-85d9-2fb455a9ebc2","Type":"ContainerStarted","Data":"4a7e2199b408489fdd8c06aba1bfbec31b376880bbc3be93513106e17ee3a608"} Dec 01 20:11:08 crc kubenswrapper[4852]: I1201 20:11:08.174009 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:08 crc kubenswrapper[4852]: I1201 20:11:08.181581 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" Dec 01 20:11:08 crc kubenswrapper[4852]: I1201 20:11:08.203061 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-58b8558775-r6ldq" podStartSLOduration=3.203032091 podStartE2EDuration="3.203032091s" podCreationTimestamp="2025-12-01 20:11:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-01 20:11:08.2002146 +0000 UTC m=+388.127296027" watchObservedRunningTime="2025-12-01 20:11:08.203032091 +0000 UTC m=+388.130113518" Dec 01 20:11:18 crc kubenswrapper[4852]: I1201 20:11:18.989852 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-j82zs" Dec 01 20:11:19 crc kubenswrapper[4852]: I1201 20:11:19.042873 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ld6ql"] Dec 01 20:11:20 crc kubenswrapper[4852]: I1201 20:11:20.230334 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:11:20 crc kubenswrapper[4852]: I1201 20:11:20.230429 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:11:20 crc kubenswrapper[4852]: I1201 20:11:20.230544 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:11:20 crc kubenswrapper[4852]: I1201 20:11:20.231664 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0d510a15a2e3605f45257253e50b41f7564e0004cc53df07be037e165d3fa731"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:11:20 crc kubenswrapper[4852]: I1201 20:11:20.231759 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://0d510a15a2e3605f45257253e50b41f7564e0004cc53df07be037e165d3fa731" gracePeriod=600 Dec 01 20:11:21 crc kubenswrapper[4852]: I1201 20:11:21.270268 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="0d510a15a2e3605f45257253e50b41f7564e0004cc53df07be037e165d3fa731" exitCode=0 Dec 01 20:11:21 crc kubenswrapper[4852]: I1201 20:11:21.270320 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"0d510a15a2e3605f45257253e50b41f7564e0004cc53df07be037e165d3fa731"} Dec 01 20:11:21 crc kubenswrapper[4852]: I1201 20:11:21.270829 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"6b2e8fffee4aa2c65b0402d8b297901a17afd0d5c47dea53650541ca8a73ef19"} Dec 01 20:11:21 crc kubenswrapper[4852]: I1201 20:11:21.270885 4852 scope.go:117] "RemoveContainer" containerID="9361fb9a1ae6b328e7f52bb33932c12b15c22033b4e4020f7fb72cf4e0ae8012" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.083793 4852 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" podUID="551cdd05-d373-4936-b295-281f59449cde" containerName="registry" containerID="cri-o://b0fb560d86a283e2a72f59a324a0d146af9b736cb3b44a1435e84df388361d85" gracePeriod=30 Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.452714 4852 generic.go:334] "Generic (PLEG): container finished" podID="551cdd05-d373-4936-b295-281f59449cde" containerID="b0fb560d86a283e2a72f59a324a0d146af9b736cb3b44a1435e84df388361d85" exitCode=0 Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.452772 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" event={"ID":"551cdd05-d373-4936-b295-281f59449cde","Type":"ContainerDied","Data":"b0fb560d86a283e2a72f59a324a0d146af9b736cb3b44a1435e84df388361d85"} Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.477440 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.622254 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsssr\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-kube-api-access-tsssr\") pod \"551cdd05-d373-4936-b295-281f59449cde\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.622371 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-trusted-ca\") pod \"551cdd05-d373-4936-b295-281f59449cde\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.622444 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/551cdd05-d373-4936-b295-281f59449cde-ca-trust-extracted\") pod \"551cdd05-d373-4936-b295-281f59449cde\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.622507 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-bound-sa-token\") pod \"551cdd05-d373-4936-b295-281f59449cde\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.622551 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-registry-certificates\") pod \"551cdd05-d373-4936-b295-281f59449cde\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.622607 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-registry-tls\") pod \"551cdd05-d373-4936-b295-281f59449cde\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.622785 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"551cdd05-d373-4936-b295-281f59449cde\" (UID: 
\"551cdd05-d373-4936-b295-281f59449cde\") " Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.622818 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/551cdd05-d373-4936-b295-281f59449cde-installation-pull-secrets\") pod \"551cdd05-d373-4936-b295-281f59449cde\" (UID: \"551cdd05-d373-4936-b295-281f59449cde\") " Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.627830 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "551cdd05-d373-4936-b295-281f59449cde" (UID: "551cdd05-d373-4936-b295-281f59449cde"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.627897 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "551cdd05-d373-4936-b295-281f59449cde" (UID: "551cdd05-d373-4936-b295-281f59449cde"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.632027 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/551cdd05-d373-4936-b295-281f59449cde-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "551cdd05-d373-4936-b295-281f59449cde" (UID: "551cdd05-d373-4936-b295-281f59449cde"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.632209 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "551cdd05-d373-4936-b295-281f59449cde" (UID: "551cdd05-d373-4936-b295-281f59449cde"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.632749 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "551cdd05-d373-4936-b295-281f59449cde" (UID: "551cdd05-d373-4936-b295-281f59449cde"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.644065 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-kube-api-access-tsssr" (OuterVolumeSpecName: "kube-api-access-tsssr") pod "551cdd05-d373-4936-b295-281f59449cde" (UID: "551cdd05-d373-4936-b295-281f59449cde"). InnerVolumeSpecName "kube-api-access-tsssr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.646816 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "551cdd05-d373-4936-b295-281f59449cde" (UID: "551cdd05-d373-4936-b295-281f59449cde"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.655650 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/551cdd05-d373-4936-b295-281f59449cde-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "551cdd05-d373-4936-b295-281f59449cde" (UID: "551cdd05-d373-4936-b295-281f59449cde"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.724894 4852 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.724952 4852 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/551cdd05-d373-4936-b295-281f59449cde-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.725029 4852 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.725051 4852 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/551cdd05-d373-4936-b295-281f59449cde-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.725071 4852 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.725088 4852 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/551cdd05-d373-4936-b295-281f59449cde-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:44 crc kubenswrapper[4852]: I1201 20:11:44.725106 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsssr\" (UniqueName: \"kubernetes.io/projected/551cdd05-d373-4936-b295-281f59449cde-kube-api-access-tsssr\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:45 crc kubenswrapper[4852]: I1201 20:11:45.459739 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" event={"ID":"551cdd05-d373-4936-b295-281f59449cde","Type":"ContainerDied","Data":"2cbabc3034c242d9725d791ddafcaf8b2ca849de5b1fb6f7cbbec35977fc511d"} Dec 01 20:11:45 crc kubenswrapper[4852]: I1201 20:11:45.459802 4852 scope.go:117] "RemoveContainer" containerID="b0fb560d86a283e2a72f59a324a0d146af9b736cb3b44a1435e84df388361d85" Dec 01 20:11:45 crc kubenswrapper[4852]: I1201 20:11:45.459943 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ld6ql" Dec 01 20:11:45 crc kubenswrapper[4852]: I1201 20:11:45.505837 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ld6ql"] Dec 01 20:11:45 crc kubenswrapper[4852]: I1201 20:11:45.506911 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ld6ql"] Dec 01 20:11:46 crc kubenswrapper[4852]: I1201 20:11:46.330737 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="551cdd05-d373-4936-b295-281f59449cde" path="/var/lib/kubelet/pods/551cdd05-d373-4936-b295-281f59449cde/volumes" Dec 01 20:13:20 crc kubenswrapper[4852]: I1201 20:13:20.229406 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:13:20 crc kubenswrapper[4852]: I1201 20:13:20.229965 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:13:40 crc kubenswrapper[4852]: I1201 20:13:40.598849 4852 scope.go:117] "RemoveContainer" containerID="951cb486a4034688f180d2c2c1a28f77b64156bbd2e4851f2af7cd7809737cf1" Dec 01 20:13:50 crc kubenswrapper[4852]: I1201 20:13:50.230545 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:13:50 crc kubenswrapper[4852]: I1201 20:13:50.231320 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:14:20 crc kubenswrapper[4852]: I1201 20:14:20.230232 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:14:20 crc kubenswrapper[4852]: I1201 20:14:20.231143 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:14:20 crc kubenswrapper[4852]: I1201 20:14:20.231218 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:14:20 crc kubenswrapper[4852]: I1201 20:14:20.232186 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"6b2e8fffee4aa2c65b0402d8b297901a17afd0d5c47dea53650541ca8a73ef19"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:14:20 crc kubenswrapper[4852]: I1201 20:14:20.232254 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://6b2e8fffee4aa2c65b0402d8b297901a17afd0d5c47dea53650541ca8a73ef19" gracePeriod=600 Dec 01 20:14:20 crc kubenswrapper[4852]: I1201 20:14:20.421043 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="6b2e8fffee4aa2c65b0402d8b297901a17afd0d5c47dea53650541ca8a73ef19" exitCode=0 Dec 01 20:14:20 crc kubenswrapper[4852]: I1201 20:14:20.421116 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"6b2e8fffee4aa2c65b0402d8b297901a17afd0d5c47dea53650541ca8a73ef19"} Dec 01 20:14:20 crc kubenswrapper[4852]: I1201 20:14:20.421173 4852 scope.go:117] "RemoveContainer" containerID="0d510a15a2e3605f45257253e50b41f7564e0004cc53df07be037e165d3fa731" Dec 01 20:14:21 crc kubenswrapper[4852]: I1201 20:14:21.428341 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"e8d8bb3a8de07d2b074b29dff0be56557f4e08cb6f204e86ee817eb0ea29ff0d"} Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.180374 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c"] Dec 01 20:15:00 crc kubenswrapper[4852]: E1201 20:15:00.181399 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="551cdd05-d373-4936-b295-281f59449cde" containerName="registry" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.181415 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="551cdd05-d373-4936-b295-281f59449cde" containerName="registry" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.181581 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="551cdd05-d373-4936-b295-281f59449cde" containerName="registry" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.182074 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.187856 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.188545 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-config-volume\") pod \"collect-profiles-29410335-2zb2c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.188602 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh6k6\" (UniqueName: \"kubernetes.io/projected/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-kube-api-access-lh6k6\") pod \"collect-profiles-29410335-2zb2c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.188772 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-secret-volume\") pod \"collect-profiles-29410335-2zb2c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.189155 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.190516 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c"] Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.289709 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-config-volume\") pod \"collect-profiles-29410335-2zb2c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.289759 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh6k6\" (UniqueName: \"kubernetes.io/projected/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-kube-api-access-lh6k6\") pod \"collect-profiles-29410335-2zb2c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.289836 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-secret-volume\") pod \"collect-profiles-29410335-2zb2c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.290682 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-config-volume\") pod 
\"collect-profiles-29410335-2zb2c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.296241 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-secret-volume\") pod \"collect-profiles-29410335-2zb2c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.312875 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh6k6\" (UniqueName: \"kubernetes.io/projected/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-kube-api-access-lh6k6\") pod \"collect-profiles-29410335-2zb2c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.504631 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:00 crc kubenswrapper[4852]: I1201 20:15:00.734318 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c"] Dec 01 20:15:01 crc kubenswrapper[4852]: I1201 20:15:01.674552 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" event={"ID":"6321b4f4-215e-4e5c-a341-6e970eb6dc9c","Type":"ContainerStarted","Data":"dbbe95ba79b288d91b9c04e5e13ebbe5c4ca212610eed4184f8a9466f0ca8dce"} Dec 01 20:15:01 crc kubenswrapper[4852]: I1201 20:15:01.675149 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" event={"ID":"6321b4f4-215e-4e5c-a341-6e970eb6dc9c","Type":"ContainerStarted","Data":"2e941d1d5fa5f8f3c4da37e5f10b8e619e0365e5fc290f0688a9a4ea6e5d511d"} Dec 01 20:15:01 crc kubenswrapper[4852]: I1201 20:15:01.699694 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" podStartSLOduration=1.699664276 podStartE2EDuration="1.699664276s" podCreationTimestamp="2025-12-01 20:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:15:01.695258649 +0000 UTC m=+621.622340106" watchObservedRunningTime="2025-12-01 20:15:01.699664276 +0000 UTC m=+621.626745723" Dec 01 20:15:02 crc kubenswrapper[4852]: I1201 20:15:02.682338 4852 generic.go:334] "Generic (PLEG): container finished" podID="6321b4f4-215e-4e5c-a341-6e970eb6dc9c" containerID="dbbe95ba79b288d91b9c04e5e13ebbe5c4ca212610eed4184f8a9466f0ca8dce" exitCode=0 Dec 01 20:15:02 crc kubenswrapper[4852]: I1201 20:15:02.682414 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" event={"ID":"6321b4f4-215e-4e5c-a341-6e970eb6dc9c","Type":"ContainerDied","Data":"dbbe95ba79b288d91b9c04e5e13ebbe5c4ca212610eed4184f8a9466f0ca8dce"} Dec 01 20:15:03 crc kubenswrapper[4852]: I1201 20:15:03.908639 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.050446 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-config-volume\") pod \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.050542 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh6k6\" (UniqueName: \"kubernetes.io/projected/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-kube-api-access-lh6k6\") pod \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.050585 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-secret-volume\") pod \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\" (UID: \"6321b4f4-215e-4e5c-a341-6e970eb6dc9c\") " Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.051847 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-config-volume" (OuterVolumeSpecName: "config-volume") pod "6321b4f4-215e-4e5c-a341-6e970eb6dc9c" (UID: "6321b4f4-215e-4e5c-a341-6e970eb6dc9c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.058205 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-kube-api-access-lh6k6" (OuterVolumeSpecName: "kube-api-access-lh6k6") pod "6321b4f4-215e-4e5c-a341-6e970eb6dc9c" (UID: "6321b4f4-215e-4e5c-a341-6e970eb6dc9c"). InnerVolumeSpecName "kube-api-access-lh6k6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.058338 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6321b4f4-215e-4e5c-a341-6e970eb6dc9c" (UID: "6321b4f4-215e-4e5c-a341-6e970eb6dc9c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.151740 4852 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.151787 4852 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.151802 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh6k6\" (UniqueName: \"kubernetes.io/projected/6321b4f4-215e-4e5c-a341-6e970eb6dc9c-kube-api-access-lh6k6\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.695199 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" event={"ID":"6321b4f4-215e-4e5c-a341-6e970eb6dc9c","Type":"ContainerDied","Data":"2e941d1d5fa5f8f3c4da37e5f10b8e619e0365e5fc290f0688a9a4ea6e5d511d"} Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.695564 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e941d1d5fa5f8f3c4da37e5f10b8e619e0365e5fc290f0688a9a4ea6e5d511d" Dec 01 20:15:04 crc kubenswrapper[4852]: I1201 20:15:04.695577 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.192204 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mf8nb"] Dec 01 20:15:36 crc kubenswrapper[4852]: E1201 20:15:36.194910 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6321b4f4-215e-4e5c-a341-6e970eb6dc9c" containerName="collect-profiles" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.195000 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="6321b4f4-215e-4e5c-a341-6e970eb6dc9c" containerName="collect-profiles" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.195220 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="6321b4f4-215e-4e5c-a341-6e970eb6dc9c" containerName="collect-profiles" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.196038 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-mf8nb" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.198910 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.199583 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.199639 4852 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-d27xq" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.200170 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-q996h"] Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.201120 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-q996h" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.203024 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mf8nb"] Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.204733 4852 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-4pj54" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.214012 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-q996h"] Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.227488 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-twxqd"] Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.228160 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-twxqd" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.230204 4852 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-vzswd" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.246700 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-twxqd"] Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.301049 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml7wv\" (UniqueName: \"kubernetes.io/projected/6686a8a5-9086-46eb-a481-5ed17b0e1318-kube-api-access-ml7wv\") pod \"cert-manager-cainjector-7f985d654d-mf8nb\" (UID: \"6686a8a5-9086-46eb-a481-5ed17b0e1318\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mf8nb" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.402983 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgrln\" (UniqueName: \"kubernetes.io/projected/3155f5b4-1371-40c2-be4a-f099a19001a9-kube-api-access-qgrln\") pod \"cert-manager-webhook-5655c58dd6-twxqd\" (UID: \"3155f5b4-1371-40c2-be4a-f099a19001a9\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-twxqd" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.403134 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml7wv\" (UniqueName: \"kubernetes.io/projected/6686a8a5-9086-46eb-a481-5ed17b0e1318-kube-api-access-ml7wv\") pod \"cert-manager-cainjector-7f985d654d-mf8nb\" (UID: \"6686a8a5-9086-46eb-a481-5ed17b0e1318\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mf8nb" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.403171 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5nnv\" (UniqueName: \"kubernetes.io/projected/95f06176-4d0e-4c13-ae9b-1f0a4b7f6256-kube-api-access-c5nnv\") pod \"cert-manager-5b446d88c5-q996h\" (UID: \"95f06176-4d0e-4c13-ae9b-1f0a4b7f6256\") " pod="cert-manager/cert-manager-5b446d88c5-q996h" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.435713 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml7wv\" (UniqueName: \"kubernetes.io/projected/6686a8a5-9086-46eb-a481-5ed17b0e1318-kube-api-access-ml7wv\") pod \"cert-manager-cainjector-7f985d654d-mf8nb\" (UID: \"6686a8a5-9086-46eb-a481-5ed17b0e1318\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mf8nb" Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 
20:15:36.504942 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgrln\" (UniqueName: \"kubernetes.io/projected/3155f5b4-1371-40c2-be4a-f099a19001a9-kube-api-access-qgrln\") pod \"cert-manager-webhook-5655c58dd6-twxqd\" (UID: \"3155f5b4-1371-40c2-be4a-f099a19001a9\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-twxqd"
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.505063 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5nnv\" (UniqueName: \"kubernetes.io/projected/95f06176-4d0e-4c13-ae9b-1f0a4b7f6256-kube-api-access-c5nnv\") pod \"cert-manager-5b446d88c5-q996h\" (UID: \"95f06176-4d0e-4c13-ae9b-1f0a4b7f6256\") " pod="cert-manager/cert-manager-5b446d88c5-q996h"
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.522618 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgrln\" (UniqueName: \"kubernetes.io/projected/3155f5b4-1371-40c2-be4a-f099a19001a9-kube-api-access-qgrln\") pod \"cert-manager-webhook-5655c58dd6-twxqd\" (UID: \"3155f5b4-1371-40c2-be4a-f099a19001a9\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-twxqd"
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.524716 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5nnv\" (UniqueName: \"kubernetes.io/projected/95f06176-4d0e-4c13-ae9b-1f0a4b7f6256-kube-api-access-c5nnv\") pod \"cert-manager-5b446d88c5-q996h\" (UID: \"95f06176-4d0e-4c13-ae9b-1f0a4b7f6256\") " pod="cert-manager/cert-manager-5b446d88c5-q996h"
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.528522 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-mf8nb"
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.535939 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-q996h"
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.547033 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-twxqd"
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.755119 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mf8nb"]
Dec 01 20:15:36 crc kubenswrapper[4852]: W1201 20:15:36.763077 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6686a8a5_9086_46eb_a481_5ed17b0e1318.slice/crio-97b1c24095d40ede0e000ad9b550b8eef93092341f4cedafa09f8341cd305690 WatchSource:0}: Error finding container 97b1c24095d40ede0e000ad9b550b8eef93092341f4cedafa09f8341cd305690: Status 404 returned error can't find the container with id 97b1c24095d40ede0e000ad9b550b8eef93092341f4cedafa09f8341cd305690
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.770064 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.781298 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-q996h"]
Dec 01 20:15:36 crc kubenswrapper[4852]: W1201 20:15:36.782280 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95f06176_4d0e_4c13_ae9b_1f0a4b7f6256.slice/crio-d6fe496ff321bb28da6f300a3d66c6e046f2b17fba06830f557aa2f8d1c3b4b9 WatchSource:0}: Error finding container d6fe496ff321bb28da6f300a3d66c6e046f2b17fba06830f557aa2f8d1c3b4b9: Status 404 returned error can't find the container with id d6fe496ff321bb28da6f300a3d66c6e046f2b17fba06830f557aa2f8d1c3b4b9
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.809439 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-twxqd"]
Dec 01 20:15:36 crc kubenswrapper[4852]: W1201 20:15:36.814626 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3155f5b4_1371_40c2_be4a_f099a19001a9.slice/crio-3b1c79275845e32ea7d5ca2696480d3e0cd4d42d3c4f062119a69a9ea5794ffb WatchSource:0}: Error finding container 3b1c79275845e32ea7d5ca2696480d3e0cd4d42d3c4f062119a69a9ea5794ffb: Status 404 returned error can't find the container with id 3b1c79275845e32ea7d5ca2696480d3e0cd4d42d3c4f062119a69a9ea5794ffb
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.909584 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-q996h" event={"ID":"95f06176-4d0e-4c13-ae9b-1f0a4b7f6256","Type":"ContainerStarted","Data":"d6fe496ff321bb28da6f300a3d66c6e046f2b17fba06830f557aa2f8d1c3b4b9"}
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.911162 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-mf8nb" event={"ID":"6686a8a5-9086-46eb-a481-5ed17b0e1318","Type":"ContainerStarted","Data":"97b1c24095d40ede0e000ad9b550b8eef93092341f4cedafa09f8341cd305690"}
Dec 01 20:15:36 crc kubenswrapper[4852]: I1201 20:15:36.912389 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-twxqd" event={"ID":"3155f5b4-1371-40c2-be4a-f099a19001a9","Type":"ContainerStarted","Data":"3b1c79275845e32ea7d5ca2696480d3e0cd4d42d3c4f062119a69a9ea5794ffb"}
Dec 01 20:15:39 crc kubenswrapper[4852]: I1201 20:15:39.933707 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-twxqd" event={"ID":"3155f5b4-1371-40c2-be4a-f099a19001a9","Type":"ContainerStarted","Data":"9ceb3b35ee457328ae3e5d550f1a9eac20c7b7ebd31597bfe2cdd788513042a3"}
Dec 01 20:15:39 crc kubenswrapper[4852]: I1201 20:15:39.934507 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-twxqd"
Dec 01 20:15:39 crc kubenswrapper[4852]: I1201 20:15:39.959697 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-twxqd" podStartSLOduration=1.054905344 podStartE2EDuration="3.959675961s" podCreationTimestamp="2025-12-01 20:15:36 +0000 UTC" firstStartedPulling="2025-12-01 20:15:36.817481327 +0000 UTC m=+656.744562744" lastFinishedPulling="2025-12-01 20:15:39.722251944 +0000 UTC m=+659.649333361" observedRunningTime="2025-12-01 20:15:39.955040087 +0000 UTC m=+659.882121494" watchObservedRunningTime="2025-12-01 20:15:39.959675961 +0000 UTC m=+659.886757378"
Dec 01 20:15:41 crc kubenswrapper[4852]: I1201 20:15:41.954182 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-q996h" event={"ID":"95f06176-4d0e-4c13-ae9b-1f0a4b7f6256","Type":"ContainerStarted","Data":"34d040b332eedd6df127b60e42ca2d596486ed9de912cf7dc8e035d4fd8aadc8"}
Dec 01 20:15:41 crc kubenswrapper[4852]: I1201 20:15:41.957132 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-mf8nb" event={"ID":"6686a8a5-9086-46eb-a481-5ed17b0e1318","Type":"ContainerStarted","Data":"aacd550e9ed521b0e9bc9c22ff9f345efe7cbb943469029dda4a9e6aac395062"}
Dec 01 20:15:41 crc kubenswrapper[4852]: I1201 20:15:41.978132 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-q996h" podStartSLOduration=1.612918668 podStartE2EDuration="5.978101084s" podCreationTimestamp="2025-12-01 20:15:36 +0000 UTC" firstStartedPulling="2025-12-01 20:15:36.78421552 +0000 UTC m=+656.711296937" lastFinishedPulling="2025-12-01 20:15:41.149397936 +0000 UTC m=+661.076479353" observedRunningTime="2025-12-01 20:15:41.971243031 +0000 UTC m=+661.898324478" watchObservedRunningTime="2025-12-01 20:15:41.978101084 +0000 UTC m=+661.905182541"
Dec 01 20:15:41 crc kubenswrapper[4852]: I1201 20:15:41.999680 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-mf8nb" podStartSLOduration=1.630508707 podStartE2EDuration="5.999631126s" podCreationTimestamp="2025-12-01 20:15:36 +0000 UTC" firstStartedPulling="2025-12-01 20:15:36.769663047 +0000 UTC m=+656.696744454" lastFinishedPulling="2025-12-01 20:15:41.138785426 +0000 UTC m=+661.065866873" observedRunningTime="2025-12-01 20:15:41.989939943 +0000 UTC m=+661.917021410" watchObservedRunningTime="2025-12-01 20:15:41.999631126 +0000 UTC m=+661.926712593"
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.551169 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-twxqd"
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.561800 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-727gr"]
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.562603 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovn-controller" containerID="cri-o://3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed" gracePeriod=30
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.562715 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="sbdb" containerID="cri-o://825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757" gracePeriod=30
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.562780 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovn-acl-logging" containerID="cri-o://6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b" gracePeriod=30
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.562826 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f" gracePeriod=30
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.562849 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="kube-rbac-proxy-node" containerID="cri-o://4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c" gracePeriod=30
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.562837 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="nbdb" containerID="cri-o://06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798" gracePeriod=30
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.565126 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="northd" containerID="cri-o://b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4" gracePeriod=30
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.629036 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller" containerID="cri-o://3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b" gracePeriod=30
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.940868 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/3.log"
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.943897 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovn-acl-logging/0.log"
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.944569 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovn-controller/0.log"
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.944997 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-727gr"
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.998316 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-cjd9b_6c477f33-3400-4c50-b2fc-e9306088770e/kube-multus/2.log"
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.999201 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-cjd9b_6c477f33-3400-4c50-b2fc-e9306088770e/kube-multus/1.log"
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.999277 4852 generic.go:334] "Generic (PLEG): container finished" podID="6c477f33-3400-4c50-b2fc-e9306088770e" containerID="877e7e734e43e56c8ff8e3592b48b6991090539bf87c13bbb051096b9e73f1ec" exitCode=2
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.999380 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-cjd9b" event={"ID":"6c477f33-3400-4c50-b2fc-e9306088770e","Type":"ContainerDied","Data":"877e7e734e43e56c8ff8e3592b48b6991090539bf87c13bbb051096b9e73f1ec"}
Dec 01 20:15:46 crc kubenswrapper[4852]: I1201 20:15:46.999444 4852 scope.go:117] "RemoveContainer" containerID="664ec58611858c382ff65a50923d2b471d66cfcf4a0c13f132518664a684707f"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.000226 4852 scope.go:117] "RemoveContainer" containerID="877e7e734e43e56c8ff8e3592b48b6991090539bf87c13bbb051096b9e73f1ec"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.000536 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-cjd9b_openshift-multus(6c477f33-3400-4c50-b2fc-e9306088770e)\"" pod="openshift-multus/multus-cjd9b" podUID="6c477f33-3400-4c50-b2fc-e9306088770e"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.002396 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovnkube-controller/3.log"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004267 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9hvq8"]
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004624 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004650 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004664 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004672 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004682 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="kube-rbac-proxy-ovn-metrics"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004689 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="kube-rbac-proxy-ovn-metrics"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004699 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="sbdb"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004706 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="sbdb"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004718 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004725 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004734 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004741 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004750 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="kubecfg-setup"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004757 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="kubecfg-setup"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004767 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="northd"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004779 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="northd"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004791 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="nbdb"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004799 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="nbdb"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004810 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="kube-rbac-proxy-node"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004818 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="kube-rbac-proxy-node"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004831 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovn-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004839 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovn-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.004857 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovn-acl-logging"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.004865 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovn-acl-logging"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005022 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005036 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005050 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="kube-rbac-proxy-ovn-metrics"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005062 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="northd"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005074 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="nbdb"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005091 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="sbdb"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005099 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="kube-rbac-proxy-node"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005112 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005124 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovn-acl-logging"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005134 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovn-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.005264 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005276 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005372 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.005593 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerName="ovnkube-controller"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.007266 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovn-acl-logging/0.log"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.007389 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.008350 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-727gr_c6dd12e6-57a6-404a-8138-66e9cfa18d00/ovn-controller/0.log"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009141 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b" exitCode=0
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009186 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757" exitCode=0
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009200 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798" exitCode=0
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009213 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4" exitCode=0
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009223 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f" exitCode=0
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009235 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c" exitCode=0
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009246 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b" exitCode=143
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009256 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-727gr"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009257 4852 generic.go:334] "Generic (PLEG): container finished" podID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" containerID="3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed" exitCode=143
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009240 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009409 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009432 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009479 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009501 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009519 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009535 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009549 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009557 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009565 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009573 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009583 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009591 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009601 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009611 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009619 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009632 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009645 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009655 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009663 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009671 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009679 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009687 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009694 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009703 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009710 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009717 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009729 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009741 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009750 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009758 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009766 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009775 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009782 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009791 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009798 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009806 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009814 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009825 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-727gr" event={"ID":"c6dd12e6-57a6-404a-8138-66e9cfa18d00","Type":"ContainerDied","Data":"18c4fac82d1f69189091ea329e5d88e1fa7d94498be36da4e3a3182100227822"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009837 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009846 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009854 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009861 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009868 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009875 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009882 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009889 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009896 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.009903 4852 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e"}
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.029544 4852 scope.go:117] "RemoveContainer" containerID="3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.058689 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-ovn-kubernetes\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.058756 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-netns\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.058802 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-slash\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.058787 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.058917 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.059576 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-log-socket\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.059606 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-systemd-units\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.059645 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-var-lib-cni-networks-ovn-kubernetes\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.059655 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-slash" (OuterVolumeSpecName: "host-slash") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.059899 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-log-socket" (OuterVolumeSpecName: "log-socket") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.059932 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-ovn\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.059957 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.059972 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-systemd\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.059991 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060007 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060113 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-openvswitch\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060144 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060164 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-config\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060275 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-node-log\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060300 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-node-log" (OuterVolumeSpecName: "node-log") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060571 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-etc-openvswitch\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060598 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovn-node-metrics-cert\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060619 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-netd\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060653 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-env-overrides\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060676 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9khtx\" (UniqueName: \"kubernetes.io/projected/c6dd12e6-57a6-404a-8138-66e9cfa18d00-kube-api-access-9khtx\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060699 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-script-lib\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060715 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-var-lib-openvswitch\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060738 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-bin\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060759 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-kubelet\") pod \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\" (UID: \"c6dd12e6-57a6-404a-8138-66e9cfa18d00\") "
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060808 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060837 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-var-lib-openvswitch\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060859 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-kubelet\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060884 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-run-netns\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060900 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-cni-bin\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060916 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-cni-netd\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060933 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ff285c42-5b77-435e-ab89-a3dce9050507-ovnkube-config\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060953 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060973 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ff285c42-5b77-435e-ab89-a3dce9050507-ovnkube-script-lib\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.060994 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-run-systemd\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061007 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-log-socket\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061083 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-slash\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061104 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-systemd-units\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061124 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm8nv\" (UniqueName: \"kubernetes.io/projected/ff285c42-5b77-435e-ab89-a3dce9050507-kube-api-access-tm8nv\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061159 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-etc-openvswitch\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061182 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-run-ovn-kubernetes\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061209 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-run-ovn\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061228 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-node-log\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061253 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-run-openvswitch\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061271 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ff285c42-5b77-435e-ab89-a3dce9050507-env-overrides\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061275 4852 scope.go:117] "RemoveContainer" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061288 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ff285c42-5b77-435e-ab89-a3dce9050507-ovn-node-metrics-cert\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061661 4852 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061691 4852 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-run-netns\") on node \"crc\" DevicePath \"\""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061709 4852 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-slash\") on node \"crc\" DevicePath \"\""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061727 4852 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-log-socket\") on node \"crc\" DevicePath \"\""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061744 4852 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-systemd-units\") on node \"crc\" DevicePath \"\""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061766 4852 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061787 4852 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061805 4852 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061823 4852 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.061838 4852 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-node-log\") on node \"crc\" DevicePath \"\""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.064043 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.064705 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.064739 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.064763 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.064798 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.064817 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.064940 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.068075 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6dd12e6-57a6-404a-8138-66e9cfa18d00-kube-api-access-9khtx" (OuterVolumeSpecName: "kube-api-access-9khtx") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "kube-api-access-9khtx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.069053 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.081871 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "c6dd12e6-57a6-404a-8138-66e9cfa18d00" (UID: "c6dd12e6-57a6-404a-8138-66e9cfa18d00"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.084843 4852 scope.go:117] "RemoveContainer" containerID="825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.102957 4852 scope.go:117] "RemoveContainer" containerID="06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.122271 4852 scope.go:117] "RemoveContainer" containerID="b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.140355 4852 scope.go:117] "RemoveContainer" containerID="19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.157506 4852 scope.go:117] "RemoveContainer" containerID="4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162615 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-run-openvswitch\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162682 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ff285c42-5b77-435e-ab89-a3dce9050507-env-overrides\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162704 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ff285c42-5b77-435e-ab89-a3dce9050507-ovn-node-metrics-cert\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162728 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-var-lib-openvswitch\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162746 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-kubelet\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162769 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-run-netns\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8"
Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162794 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-cni-bin\") pod \"ovnkube-node-9hvq8\" 
(UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162810 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-cni-netd\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162827 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ff285c42-5b77-435e-ab89-a3dce9050507-ovnkube-config\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162853 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162859 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-kubelet\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162876 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ff285c42-5b77-435e-ab89-a3dce9050507-ovnkube-script-lib\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.162677 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-run-openvswitch\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163219 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-run-systemd\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163253 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-log-socket\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163276 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-cni-bin\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163308 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-slash\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163310 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-run-netns\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163334 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163395 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-systemd-units\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163409 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-log-socket\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163422 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-run-systemd\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163437 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-systemd-units\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163495 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-cni-netd\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163435 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-slash\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163526 4852 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-var-lib-openvswitch\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163581 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm8nv\" (UniqueName: \"kubernetes.io/projected/ff285c42-5b77-435e-ab89-a3dce9050507-kube-api-access-tm8nv\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.163624 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-etc-openvswitch\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164084 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ff285c42-5b77-435e-ab89-a3dce9050507-env-overrides\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164101 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ff285c42-5b77-435e-ab89-a3dce9050507-ovnkube-config\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164170 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-etc-openvswitch\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164202 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-run-ovn-kubernetes\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164243 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-run-ovn\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164277 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-node-log\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164293 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/ff285c42-5b77-435e-ab89-a3dce9050507-ovnkube-script-lib\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164314 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-run-ovn\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164317 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-host-run-ovn-kubernetes\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164339 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ff285c42-5b77-435e-ab89-a3dce9050507-node-log\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164492 4852 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164513 4852 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164524 4852 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164537 4852 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164548 4852 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164558 4852 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164569 4852 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c6dd12e6-57a6-404a-8138-66e9cfa18d00-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164581 4852 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c6dd12e6-57a6-404a-8138-66e9cfa18d00-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164618 
4852 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c6dd12e6-57a6-404a-8138-66e9cfa18d00-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.164628 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9khtx\" (UniqueName: \"kubernetes.io/projected/c6dd12e6-57a6-404a-8138-66e9cfa18d00-kube-api-access-9khtx\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.167400 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ff285c42-5b77-435e-ab89-a3dce9050507-ovn-node-metrics-cert\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.177398 4852 scope.go:117] "RemoveContainer" containerID="6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.184823 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm8nv\" (UniqueName: \"kubernetes.io/projected/ff285c42-5b77-435e-ab89-a3dce9050507-kube-api-access-tm8nv\") pod \"ovnkube-node-9hvq8\" (UID: \"ff285c42-5b77-435e-ab89-a3dce9050507\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.193443 4852 scope.go:117] "RemoveContainer" containerID="3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.210356 4852 scope.go:117] "RemoveContainer" containerID="458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.228965 4852 scope.go:117] "RemoveContainer" containerID="3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b" Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.229765 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b\": container with ID starting with 3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b not found: ID does not exist" containerID="3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.229850 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"} err="failed to get container status \"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b\": rpc error: code = NotFound desc = could not find container \"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b\": container with ID starting with 3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.229920 4852 scope.go:117] "RemoveContainer" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14" Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.230591 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\": container with ID starting with 
a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14 not found: ID does not exist" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.230621 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"} err="failed to get container status \"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\": rpc error: code = NotFound desc = could not find container \"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\": container with ID starting with a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.230659 4852 scope.go:117] "RemoveContainer" containerID="825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757" Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.231091 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\": container with ID starting with 825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757 not found: ID does not exist" containerID="825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.231149 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"} err="failed to get container status \"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\": rpc error: code = NotFound desc = could not find container \"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\": container with ID starting with 825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.231189 4852 scope.go:117] "RemoveContainer" containerID="06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798" Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.231676 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\": container with ID starting with 06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798 not found: ID does not exist" containerID="06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.231709 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"} err="failed to get container status \"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\": rpc error: code = NotFound desc = could not find container \"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\": container with ID starting with 06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.231727 4852 scope.go:117] "RemoveContainer" containerID="b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4" Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.232109 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\": container with ID starting with b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4 not found: ID does not exist" containerID="b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.232172 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"} err="failed to get container status \"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\": rpc error: code = NotFound desc = could not find container \"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\": container with ID starting with b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.232211 4852 scope.go:117] "RemoveContainer" containerID="19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f" Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.232693 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\": container with ID starting with 19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f not found: ID does not exist" containerID="19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.232808 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"} err="failed to get container status \"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\": rpc error: code = NotFound desc = could not find container \"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\": container with ID starting with 19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.232891 4852 scope.go:117] "RemoveContainer" containerID="4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c" Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.233376 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\": container with ID starting with 4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c not found: ID does not exist" containerID="4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.233431 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"} err="failed to get container status \"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\": rpc error: code = NotFound desc = could not find container \"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\": container with ID starting with 4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.233482 4852 scope.go:117] "RemoveContainer" 
containerID="6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b" Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.233986 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\": container with ID starting with 6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b not found: ID does not exist" containerID="6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.234020 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b"} err="failed to get container status \"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\": rpc error: code = NotFound desc = could not find container \"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\": container with ID starting with 6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.234094 4852 scope.go:117] "RemoveContainer" containerID="3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed" Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.234573 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\": container with ID starting with 3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed not found: ID does not exist" containerID="3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.234613 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed"} err="failed to get container status \"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\": rpc error: code = NotFound desc = could not find container \"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\": container with ID starting with 3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.234655 4852 scope.go:117] "RemoveContainer" containerID="458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e" Dec 01 20:15:47 crc kubenswrapper[4852]: E1201 20:15:47.235031 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\": container with ID starting with 458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e not found: ID does not exist" containerID="458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.235070 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e"} err="failed to get container status \"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\": rpc error: code = NotFound desc = could not find container \"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\": container with ID starting with 
458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.235096 4852 scope.go:117] "RemoveContainer" containerID="3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.235610 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"} err="failed to get container status \"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b\": rpc error: code = NotFound desc = could not find container \"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b\": container with ID starting with 3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.235635 4852 scope.go:117] "RemoveContainer" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.236113 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"} err="failed to get container status \"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\": rpc error: code = NotFound desc = could not find container \"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\": container with ID starting with a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.236158 4852 scope.go:117] "RemoveContainer" containerID="825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.236490 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"} err="failed to get container status \"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\": rpc error: code = NotFound desc = could not find container \"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\": container with ID starting with 825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.236526 4852 scope.go:117] "RemoveContainer" containerID="06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.236955 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"} err="failed to get container status \"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\": rpc error: code = NotFound desc = could not find container \"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\": container with ID starting with 06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.236991 4852 scope.go:117] "RemoveContainer" containerID="b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.237486 4852 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"} err="failed to get container status \"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\": rpc error: code = NotFound desc = could not find container \"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\": container with ID starting with b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.237544 4852 scope.go:117] "RemoveContainer" containerID="19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.237948 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"} err="failed to get container status \"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\": rpc error: code = NotFound desc = could not find container \"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\": container with ID starting with 19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.237976 4852 scope.go:117] "RemoveContainer" containerID="4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.238737 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"} err="failed to get container status \"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\": rpc error: code = NotFound desc = could not find container \"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\": container with ID starting with 4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.238821 4852 scope.go:117] "RemoveContainer" containerID="6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.239251 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b"} err="failed to get container status \"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\": rpc error: code = NotFound desc = could not find container \"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\": container with ID starting with 6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.239285 4852 scope.go:117] "RemoveContainer" containerID="3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.239683 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed"} err="failed to get container status \"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\": rpc error: code = NotFound desc = could not find container \"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\": container with ID starting with 3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed not found: ID does not exist" Dec 
01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.239713 4852 scope.go:117] "RemoveContainer" containerID="458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.240014 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e"} err="failed to get container status \"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\": rpc error: code = NotFound desc = could not find container \"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\": container with ID starting with 458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.240041 4852 scope.go:117] "RemoveContainer" containerID="3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.240413 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"} err="failed to get container status \"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b\": rpc error: code = NotFound desc = could not find container \"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b\": container with ID starting with 3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.240472 4852 scope.go:117] "RemoveContainer" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.240851 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"} err="failed to get container status \"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\": rpc error: code = NotFound desc = could not find container \"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\": container with ID starting with a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.240881 4852 scope.go:117] "RemoveContainer" containerID="825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.241190 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"} err="failed to get container status \"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\": rpc error: code = NotFound desc = could not find container \"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\": container with ID starting with 825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.241238 4852 scope.go:117] "RemoveContainer" containerID="06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.241869 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"} err="failed to get container status 
\"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\": rpc error: code = NotFound desc = could not find container \"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\": container with ID starting with 06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.241901 4852 scope.go:117] "RemoveContainer" containerID="b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.242297 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"} err="failed to get container status \"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\": rpc error: code = NotFound desc = could not find container \"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\": container with ID starting with b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.242323 4852 scope.go:117] "RemoveContainer" containerID="19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.242663 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"} err="failed to get container status \"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\": rpc error: code = NotFound desc = could not find container \"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\": container with ID starting with 19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.242695 4852 scope.go:117] "RemoveContainer" containerID="4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.243029 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"} err="failed to get container status \"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\": rpc error: code = NotFound desc = could not find container \"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\": container with ID starting with 4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.243057 4852 scope.go:117] "RemoveContainer" containerID="6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.243349 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b"} err="failed to get container status \"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\": rpc error: code = NotFound desc = could not find container \"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\": container with ID starting with 6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.243384 4852 scope.go:117] "RemoveContainer" 
containerID="3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.243726 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed"} err="failed to get container status \"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\": rpc error: code = NotFound desc = could not find container \"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\": container with ID starting with 3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.243749 4852 scope.go:117] "RemoveContainer" containerID="458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.244035 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e"} err="failed to get container status \"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\": rpc error: code = NotFound desc = could not find container \"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\": container with ID starting with 458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.244064 4852 scope.go:117] "RemoveContainer" containerID="3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.244483 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b"} err="failed to get container status \"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b\": rpc error: code = NotFound desc = could not find container \"3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b\": container with ID starting with 3c998037b92e7bbde32b75f11c75eab62e45e7c63beae06d24996d53b787423b not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.244505 4852 scope.go:117] "RemoveContainer" containerID="a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.244797 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14"} err="failed to get container status \"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\": rpc error: code = NotFound desc = could not find container \"a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14\": container with ID starting with a59a74539f751bf18612fb31dd3f10de739a5f353219b1e2ff1e9c34c3ef9c14 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.244833 4852 scope.go:117] "RemoveContainer" containerID="825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.245760 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757"} err="failed to get container status \"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\": rpc error: code = NotFound desc = could not find 
container \"825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757\": container with ID starting with 825252def854b3fb00196ebc2fcc944df0f4a363562613157a3f42e3c93c3757 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.245797 4852 scope.go:117] "RemoveContainer" containerID="06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.246162 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798"} err="failed to get container status \"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\": rpc error: code = NotFound desc = could not find container \"06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798\": container with ID starting with 06712dd8736f7a7074b67f77105044b21a2254f648faac6769d447a87e642798 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.246194 4852 scope.go:117] "RemoveContainer" containerID="b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.246512 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4"} err="failed to get container status \"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\": rpc error: code = NotFound desc = could not find container \"b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4\": container with ID starting with b1423f625d45c6ba68a769be430658708e35e96c0eb6faa5ebd30cb9ece7cbd4 not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.246540 4852 scope.go:117] "RemoveContainer" containerID="19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.246929 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f"} err="failed to get container status \"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\": rpc error: code = NotFound desc = could not find container \"19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f\": container with ID starting with 19526b7c60b6ff234a2cb9aa6f8f2e4bc306862750377350337e4443717f607f not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.246958 4852 scope.go:117] "RemoveContainer" containerID="4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.247334 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c"} err="failed to get container status \"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\": rpc error: code = NotFound desc = could not find container \"4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c\": container with ID starting with 4ec34656c7bac3e09aa7bc38dd8d5ff71aacd01668b04beeee80b665bf87b81c not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.247397 4852 scope.go:117] "RemoveContainer" containerID="6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.247850 4852 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b"} err="failed to get container status \"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\": rpc error: code = NotFound desc = could not find container \"6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b\": container with ID starting with 6f261240a379cc145ca9696ce45c7442369f86615f957268c73191f59d47c03b not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.247891 4852 scope.go:117] "RemoveContainer" containerID="3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.248250 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed"} err="failed to get container status \"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\": rpc error: code = NotFound desc = could not find container \"3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed\": container with ID starting with 3160551d2f12a8977afb8fb386867fa3fdff17170c777fa640ebbff19187e2ed not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.248280 4852 scope.go:117] "RemoveContainer" containerID="458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.248587 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e"} err="failed to get container status \"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\": rpc error: code = NotFound desc = could not find container \"458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e\": container with ID starting with 458f2b526a83e412f2229fe1e3a8844f2cbd3925c334c65dd21024fb1df2b10e not found: ID does not exist" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.332019 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.344318 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-727gr"] Dec 01 20:15:47 crc kubenswrapper[4852]: I1201 20:15:47.348831 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-727gr"] Dec 01 20:15:48 crc kubenswrapper[4852]: I1201 20:15:48.033372 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-cjd9b_6c477f33-3400-4c50-b2fc-e9306088770e/kube-multus/2.log" Dec 01 20:15:48 crc kubenswrapper[4852]: I1201 20:15:48.049110 4852 generic.go:334] "Generic (PLEG): container finished" podID="ff285c42-5b77-435e-ab89-a3dce9050507" containerID="0a4eee8e28627ccc4d52bb21bddd50f37563c174ee62b9156a4321b3407bad51" exitCode=0 Dec 01 20:15:48 crc kubenswrapper[4852]: I1201 20:15:48.049211 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" event={"ID":"ff285c42-5b77-435e-ab89-a3dce9050507","Type":"ContainerDied","Data":"0a4eee8e28627ccc4d52bb21bddd50f37563c174ee62b9156a4321b3407bad51"} Dec 01 20:15:48 crc kubenswrapper[4852]: I1201 20:15:48.049282 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" event={"ID":"ff285c42-5b77-435e-ab89-a3dce9050507","Type":"ContainerStarted","Data":"2c17a7c51ce0e2469451552afb520254ce0140e557c90b4cea953b695be669e0"} Dec 01 20:15:48 crc kubenswrapper[4852]: I1201 20:15:48.331332 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6dd12e6-57a6-404a-8138-66e9cfa18d00" path="/var/lib/kubelet/pods/c6dd12e6-57a6-404a-8138-66e9cfa18d00/volumes" Dec 01 20:15:49 crc kubenswrapper[4852]: I1201 20:15:49.060481 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" event={"ID":"ff285c42-5b77-435e-ab89-a3dce9050507","Type":"ContainerStarted","Data":"e6cbc6db3e50fcfd4f87c82316c0e1abbc5bff901c92d969261346a5f86075ef"} Dec 01 20:15:49 crc kubenswrapper[4852]: I1201 20:15:49.061041 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" event={"ID":"ff285c42-5b77-435e-ab89-a3dce9050507","Type":"ContainerStarted","Data":"6f80956968dc1dc2ec16ce7bdfb5b248340e4b61377dea638bdf97946b70cc7e"} Dec 01 20:15:50 crc kubenswrapper[4852]: I1201 20:15:50.073280 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" event={"ID":"ff285c42-5b77-435e-ab89-a3dce9050507","Type":"ContainerStarted","Data":"5b4ac2c6a7e6e56bf37ee6da7dc0444dce0a641cb4b33bb231ffd29bed1b1fd0"} Dec 01 20:15:50 crc kubenswrapper[4852]: I1201 20:15:50.073957 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" event={"ID":"ff285c42-5b77-435e-ab89-a3dce9050507","Type":"ContainerStarted","Data":"512eb6d0fc1e4e27ce3c351efdaec2a8032eca3382ae1cfa5fc5fe38efb2e854"} Dec 01 20:15:50 crc kubenswrapper[4852]: I1201 20:15:50.073985 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" event={"ID":"ff285c42-5b77-435e-ab89-a3dce9050507","Type":"ContainerStarted","Data":"da2600bb0f04decb78cc95aac6015f93c0262872cbb0248225b03bd565e62390"} Dec 01 20:15:50 crc kubenswrapper[4852]: I1201 20:15:50.074006 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" 
event={"ID":"ff285c42-5b77-435e-ab89-a3dce9050507","Type":"ContainerStarted","Data":"b56028e323e419d8c08c488cf05cc060aadb7dd7ecef5462d262b23d719a5b2e"} Dec 01 20:15:52 crc kubenswrapper[4852]: I1201 20:15:52.093927 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" event={"ID":"ff285c42-5b77-435e-ab89-a3dce9050507","Type":"ContainerStarted","Data":"4c201a0cf235f0b2669e7f1f6b36b7c89a7422514655eae5407c317334729c4f"} Dec 01 20:15:55 crc kubenswrapper[4852]: I1201 20:15:55.138621 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" event={"ID":"ff285c42-5b77-435e-ab89-a3dce9050507","Type":"ContainerStarted","Data":"cdb9745a0c9850fb4011a362e196784764d486f4b3949426a248f425a63b2eb0"} Dec 01 20:15:55 crc kubenswrapper[4852]: I1201 20:15:55.139258 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:55 crc kubenswrapper[4852]: I1201 20:15:55.139411 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:55 crc kubenswrapper[4852]: I1201 20:15:55.139464 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:55 crc kubenswrapper[4852]: I1201 20:15:55.176831 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" podStartSLOduration=9.176804313 podStartE2EDuration="9.176804313s" podCreationTimestamp="2025-12-01 20:15:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:15:55.174245474 +0000 UTC m=+675.101326891" watchObservedRunningTime="2025-12-01 20:15:55.176804313 +0000 UTC m=+675.103885730" Dec 01 20:15:55 crc kubenswrapper[4852]: I1201 20:15:55.179198 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:55 crc kubenswrapper[4852]: I1201 20:15:55.181387 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:15:59 crc kubenswrapper[4852]: I1201 20:15:59.320089 4852 scope.go:117] "RemoveContainer" containerID="877e7e734e43e56c8ff8e3592b48b6991090539bf87c13bbb051096b9e73f1ec" Dec 01 20:15:59 crc kubenswrapper[4852]: E1201 20:15:59.320910 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-cjd9b_openshift-multus(6c477f33-3400-4c50-b2fc-e9306088770e)\"" pod="openshift-multus/multus-cjd9b" podUID="6c477f33-3400-4c50-b2fc-e9306088770e" Dec 01 20:16:11 crc kubenswrapper[4852]: I1201 20:16:11.320625 4852 scope.go:117] "RemoveContainer" containerID="877e7e734e43e56c8ff8e3592b48b6991090539bf87c13bbb051096b9e73f1ec" Dec 01 20:16:12 crc kubenswrapper[4852]: I1201 20:16:12.254164 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-cjd9b_6c477f33-3400-4c50-b2fc-e9306088770e/kube-multus/2.log" Dec 01 20:16:12 crc kubenswrapper[4852]: I1201 20:16:12.254838 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-cjd9b" 
event={"ID":"6c477f33-3400-4c50-b2fc-e9306088770e","Type":"ContainerStarted","Data":"86627e48d0fef22bc798641f92eefa830ba37d6fb3fc6304055181309ec394a5"} Dec 01 20:16:17 crc kubenswrapper[4852]: I1201 20:16:17.370079 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9hvq8" Dec 01 20:16:20 crc kubenswrapper[4852]: I1201 20:16:20.230337 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:16:20 crc kubenswrapper[4852]: I1201 20:16:20.231246 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:16:25 crc kubenswrapper[4852]: I1201 20:16:25.825633 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh"] Dec 01 20:16:25 crc kubenswrapper[4852]: I1201 20:16:25.827443 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:25 crc kubenswrapper[4852]: I1201 20:16:25.829714 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 01 20:16:25 crc kubenswrapper[4852]: I1201 20:16:25.834114 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh"] Dec 01 20:16:25 crc kubenswrapper[4852]: I1201 20:16:25.921777 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b7hv\" (UniqueName: \"kubernetes.io/projected/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-kube-api-access-8b7hv\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:25 crc kubenswrapper[4852]: I1201 20:16:25.921842 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:25 crc kubenswrapper[4852]: I1201 20:16:25.921915 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:26 crc kubenswrapper[4852]: I1201 20:16:26.022974 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b7hv\" (UniqueName: 
\"kubernetes.io/projected/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-kube-api-access-8b7hv\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:26 crc kubenswrapper[4852]: I1201 20:16:26.023187 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:26 crc kubenswrapper[4852]: I1201 20:16:26.023294 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:26 crc kubenswrapper[4852]: I1201 20:16:26.024191 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:26 crc kubenswrapper[4852]: I1201 20:16:26.024223 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:26 crc kubenswrapper[4852]: I1201 20:16:26.044032 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b7hv\" (UniqueName: \"kubernetes.io/projected/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-kube-api-access-8b7hv\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:26 crc kubenswrapper[4852]: I1201 20:16:26.147736 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:26 crc kubenswrapper[4852]: I1201 20:16:26.339312 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh"] Dec 01 20:16:26 crc kubenswrapper[4852]: W1201 20:16:26.346579 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c6c4086_9a8b_4563_907e_f1fd309d9cbd.slice/crio-64980cedc7efc906075a3427dc62156c1c5655453f9b15b55b17fabe2fb4becd WatchSource:0}: Error finding container 64980cedc7efc906075a3427dc62156c1c5655453f9b15b55b17fabe2fb4becd: Status 404 returned error can't find the container with id 64980cedc7efc906075a3427dc62156c1c5655453f9b15b55b17fabe2fb4becd Dec 01 20:16:27 crc kubenswrapper[4852]: I1201 20:16:27.358007 4852 generic.go:334] "Generic (PLEG): container finished" podID="2c6c4086-9a8b-4563-907e-f1fd309d9cbd" containerID="4be6f9254a6b8cf7e969231bc08952ef72064499005150e5c65fdaf3d5c56975" exitCode=0 Dec 01 20:16:27 crc kubenswrapper[4852]: I1201 20:16:27.358129 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" event={"ID":"2c6c4086-9a8b-4563-907e-f1fd309d9cbd","Type":"ContainerDied","Data":"4be6f9254a6b8cf7e969231bc08952ef72064499005150e5c65fdaf3d5c56975"} Dec 01 20:16:27 crc kubenswrapper[4852]: I1201 20:16:27.358582 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" event={"ID":"2c6c4086-9a8b-4563-907e-f1fd309d9cbd","Type":"ContainerStarted","Data":"64980cedc7efc906075a3427dc62156c1c5655453f9b15b55b17fabe2fb4becd"} Dec 01 20:16:30 crc kubenswrapper[4852]: I1201 20:16:30.377430 4852 generic.go:334] "Generic (PLEG): container finished" podID="2c6c4086-9a8b-4563-907e-f1fd309d9cbd" containerID="99ccbf84bdf72f24199e51993f43cf8ea2e6525658437ecdceb4b91acd7c34f5" exitCode=0 Dec 01 20:16:30 crc kubenswrapper[4852]: I1201 20:16:30.377529 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" event={"ID":"2c6c4086-9a8b-4563-907e-f1fd309d9cbd","Type":"ContainerDied","Data":"99ccbf84bdf72f24199e51993f43cf8ea2e6525658437ecdceb4b91acd7c34f5"} Dec 01 20:16:31 crc kubenswrapper[4852]: I1201 20:16:31.388602 4852 generic.go:334] "Generic (PLEG): container finished" podID="2c6c4086-9a8b-4563-907e-f1fd309d9cbd" containerID="f5002f1d80c90465c12bb7cf1e3c5d19388d58d15f25e74347836ffb792ae639" exitCode=0 Dec 01 20:16:31 crc kubenswrapper[4852]: I1201 20:16:31.388858 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" event={"ID":"2c6c4086-9a8b-4563-907e-f1fd309d9cbd","Type":"ContainerDied","Data":"f5002f1d80c90465c12bb7cf1e3c5d19388d58d15f25e74347836ffb792ae639"} Dec 01 20:16:32 crc kubenswrapper[4852]: I1201 20:16:32.644189 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:32 crc kubenswrapper[4852]: I1201 20:16:32.814113 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-util\") pod \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " Dec 01 20:16:32 crc kubenswrapper[4852]: I1201 20:16:32.814480 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-bundle\") pod \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " Dec 01 20:16:32 crc kubenswrapper[4852]: I1201 20:16:32.814550 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b7hv\" (UniqueName: \"kubernetes.io/projected/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-kube-api-access-8b7hv\") pod \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\" (UID: \"2c6c4086-9a8b-4563-907e-f1fd309d9cbd\") " Dec 01 20:16:32 crc kubenswrapper[4852]: I1201 20:16:32.816289 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-bundle" (OuterVolumeSpecName: "bundle") pod "2c6c4086-9a8b-4563-907e-f1fd309d9cbd" (UID: "2c6c4086-9a8b-4563-907e-f1fd309d9cbd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:16:32 crc kubenswrapper[4852]: I1201 20:16:32.829101 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-kube-api-access-8b7hv" (OuterVolumeSpecName: "kube-api-access-8b7hv") pod "2c6c4086-9a8b-4563-907e-f1fd309d9cbd" (UID: "2c6c4086-9a8b-4563-907e-f1fd309d9cbd"). InnerVolumeSpecName "kube-api-access-8b7hv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:16:32 crc kubenswrapper[4852]: I1201 20:16:32.838374 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-util" (OuterVolumeSpecName: "util") pod "2c6c4086-9a8b-4563-907e-f1fd309d9cbd" (UID: "2c6c4086-9a8b-4563-907e-f1fd309d9cbd"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:16:32 crc kubenswrapper[4852]: I1201 20:16:32.916025 4852 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-util\") on node \"crc\" DevicePath \"\"" Dec 01 20:16:32 crc kubenswrapper[4852]: I1201 20:16:32.916055 4852 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:16:32 crc kubenswrapper[4852]: I1201 20:16:32.916065 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b7hv\" (UniqueName: \"kubernetes.io/projected/2c6c4086-9a8b-4563-907e-f1fd309d9cbd-kube-api-access-8b7hv\") on node \"crc\" DevicePath \"\"" Dec 01 20:16:33 crc kubenswrapper[4852]: I1201 20:16:33.406874 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" event={"ID":"2c6c4086-9a8b-4563-907e-f1fd309d9cbd","Type":"ContainerDied","Data":"64980cedc7efc906075a3427dc62156c1c5655453f9b15b55b17fabe2fb4becd"} Dec 01 20:16:33 crc kubenswrapper[4852]: I1201 20:16:33.406941 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64980cedc7efc906075a3427dc62156c1c5655453f9b15b55b17fabe2fb4becd" Dec 01 20:16:33 crc kubenswrapper[4852]: I1201 20:16:33.406994 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.471879 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds"] Dec 01 20:16:37 crc kubenswrapper[4852]: E1201 20:16:37.472658 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c6c4086-9a8b-4563-907e-f1fd309d9cbd" containerName="util" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.472679 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c6c4086-9a8b-4563-907e-f1fd309d9cbd" containerName="util" Dec 01 20:16:37 crc kubenswrapper[4852]: E1201 20:16:37.472695 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c6c4086-9a8b-4563-907e-f1fd309d9cbd" containerName="pull" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.472704 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c6c4086-9a8b-4563-907e-f1fd309d9cbd" containerName="pull" Dec 01 20:16:37 crc kubenswrapper[4852]: E1201 20:16:37.472724 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c6c4086-9a8b-4563-907e-f1fd309d9cbd" containerName="extract" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.472734 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c6c4086-9a8b-4563-907e-f1fd309d9cbd" containerName="extract" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.472861 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c6c4086-9a8b-4563-907e-f1fd309d9cbd" containerName="extract" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.473367 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.475605 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-qs9qw" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.475605 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.476619 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.487652 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds"] Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.584769 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pbxm\" (UniqueName: \"kubernetes.io/projected/ce552c22-e8f7-4f0d-a5a3-055dd64a6123-kube-api-access-9pbxm\") pod \"nmstate-operator-5b5b58f5c8-jqpds\" (UID: \"ce552c22-e8f7-4f0d-a5a3-055dd64a6123\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.686565 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pbxm\" (UniqueName: \"kubernetes.io/projected/ce552c22-e8f7-4f0d-a5a3-055dd64a6123-kube-api-access-9pbxm\") pod \"nmstate-operator-5b5b58f5c8-jqpds\" (UID: \"ce552c22-e8f7-4f0d-a5a3-055dd64a6123\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.707928 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pbxm\" (UniqueName: \"kubernetes.io/projected/ce552c22-e8f7-4f0d-a5a3-055dd64a6123-kube-api-access-9pbxm\") pod \"nmstate-operator-5b5b58f5c8-jqpds\" (UID: \"ce552c22-e8f7-4f0d-a5a3-055dd64a6123\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds" Dec 01 20:16:37 crc kubenswrapper[4852]: I1201 20:16:37.811995 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds" Dec 01 20:16:38 crc kubenswrapper[4852]: I1201 20:16:38.107369 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds"] Dec 01 20:16:38 crc kubenswrapper[4852]: I1201 20:16:38.439668 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds" event={"ID":"ce552c22-e8f7-4f0d-a5a3-055dd64a6123","Type":"ContainerStarted","Data":"629073cbce37a103bdf79e45f5c38c161ddc9171c843eadcb9d518f6a4332db5"} Dec 01 20:16:40 crc kubenswrapper[4852]: I1201 20:16:40.672397 4852 scope.go:117] "RemoveContainer" containerID="b4ff65ce795b711c66ad71d5a9af9550f810ffcff61ced90d802d8c67c0d8309" Dec 01 20:16:42 crc kubenswrapper[4852]: I1201 20:16:42.467770 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds" event={"ID":"ce552c22-e8f7-4f0d-a5a3-055dd64a6123","Type":"ContainerStarted","Data":"26e802a4871c1c3493f55d76bf1a689b3902245432cee9f3d7d352a410f3dd8d"} Dec 01 20:16:42 crc kubenswrapper[4852]: I1201 20:16:42.489765 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-jqpds" podStartSLOduration=1.993538367 podStartE2EDuration="5.489743521s" podCreationTimestamp="2025-12-01 20:16:37 +0000 UTC" firstStartedPulling="2025-12-01 20:16:38.141023997 +0000 UTC m=+718.068105414" lastFinishedPulling="2025-12-01 20:16:41.637229151 +0000 UTC m=+721.564310568" observedRunningTime="2025-12-01 20:16:42.485950523 +0000 UTC m=+722.413032050" watchObservedRunningTime="2025-12-01 20:16:42.489743521 +0000 UTC m=+722.416824958" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.303204 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg"] Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.307092 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv"] Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.308885 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.309644 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.316043 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-65kgw" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.316299 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.331479 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg"] Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.331534 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv"] Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.331550 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-dxw88"] Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.332785 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.420097 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-nmstate-lock\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.420174 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4816cba9-dcc7-48c9-b4b6-a41513a2611b-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sk6kv\" (UID: \"4816cba9-dcc7-48c9-b4b6-a41513a2611b\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.420196 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9s8dr\" (UniqueName: \"kubernetes.io/projected/4816cba9-dcc7-48c9-b4b6-a41513a2611b-kube-api-access-9s8dr\") pod \"nmstate-webhook-5f6d4c5ccb-sk6kv\" (UID: \"4816cba9-dcc7-48c9-b4b6-a41513a2611b\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.420238 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56v4v\" (UniqueName: \"kubernetes.io/projected/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-kube-api-access-56v4v\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.420256 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-ovs-socket\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.420274 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-dbus-socket\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.420290 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdf95\" (UniqueName: \"kubernetes.io/projected/fe79317d-951e-446a-9ba2-0d272c5bd48c-kube-api-access-jdf95\") pod \"nmstate-metrics-7f946cbc9-qdrrg\" (UID: \"fe79317d-951e-446a-9ba2-0d272c5bd48c\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.467122 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4"] Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.467978 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.470993 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.471000 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.471522 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-f9nw2" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.505875 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4"] Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.525029 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4816cba9-dcc7-48c9-b4b6-a41513a2611b-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sk6kv\" (UID: \"4816cba9-dcc7-48c9-b4b6-a41513a2611b\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.525069 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9s8dr\" (UniqueName: \"kubernetes.io/projected/4816cba9-dcc7-48c9-b4b6-a41513a2611b-kube-api-access-9s8dr\") pod \"nmstate-webhook-5f6d4c5ccb-sk6kv\" (UID: \"4816cba9-dcc7-48c9-b4b6-a41513a2611b\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.525114 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56v4v\" (UniqueName: \"kubernetes.io/projected/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-kube-api-access-56v4v\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.525133 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-ovs-socket\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.525152 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-dbus-socket\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.525171 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdf95\" (UniqueName: \"kubernetes.io/projected/fe79317d-951e-446a-9ba2-0d272c5bd48c-kube-api-access-jdf95\") pod \"nmstate-metrics-7f946cbc9-qdrrg\" (UID: \"fe79317d-951e-446a-9ba2-0d272c5bd48c\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.525207 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-nmstate-lock\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 
20:16:46 crc kubenswrapper[4852]: E1201 20:16:46.525204 4852 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 01 20:16:46 crc kubenswrapper[4852]: E1201 20:16:46.525294 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4816cba9-dcc7-48c9-b4b6-a41513a2611b-tls-key-pair podName:4816cba9-dcc7-48c9-b4b6-a41513a2611b nodeName:}" failed. No retries permitted until 2025-12-01 20:16:47.025275986 +0000 UTC m=+726.952357403 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/4816cba9-dcc7-48c9-b4b6-a41513a2611b-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-sk6kv" (UID: "4816cba9-dcc7-48c9-b4b6-a41513a2611b") : secret "openshift-nmstate-webhook" not found Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.525474 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-ovs-socket\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.525547 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-nmstate-lock\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.525713 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-dbus-socket\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.556335 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdf95\" (UniqueName: \"kubernetes.io/projected/fe79317d-951e-446a-9ba2-0d272c5bd48c-kube-api-access-jdf95\") pod \"nmstate-metrics-7f946cbc9-qdrrg\" (UID: \"fe79317d-951e-446a-9ba2-0d272c5bd48c\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.559370 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56v4v\" (UniqueName: \"kubernetes.io/projected/0d3319a8-2dcb-459f-9d3d-6f1eab59ae18-kube-api-access-56v4v\") pod \"nmstate-handler-dxw88\" (UID: \"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18\") " pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.563672 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9s8dr\" (UniqueName: \"kubernetes.io/projected/4816cba9-dcc7-48c9-b4b6-a41513a2611b-kube-api-access-9s8dr\") pod \"nmstate-webhook-5f6d4c5ccb-sk6kv\" (UID: \"4816cba9-dcc7-48c9-b4b6-a41513a2611b\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.626884 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f5d7a90a-1755-450a-903a-016f63394e43-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-vf2l4\" (UID: \"f5d7a90a-1755-450a-903a-016f63394e43\") " 
pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.627281 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5d7a90a-1755-450a-903a-016f63394e43-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-vf2l4\" (UID: \"f5d7a90a-1755-450a-903a-016f63394e43\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.627303 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxpgz\" (UniqueName: \"kubernetes.io/projected/f5d7a90a-1755-450a-903a-016f63394e43-kube-api-access-pxpgz\") pod \"nmstate-console-plugin-7fbb5f6569-vf2l4\" (UID: \"f5d7a90a-1755-450a-903a-016f63394e43\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.640857 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.672313 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-dxw88" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.689689 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5595cfd85-cfpsv"] Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.693439 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.705969 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5595cfd85-cfpsv"] Dec 01 20:16:46 crc kubenswrapper[4852]: W1201 20:16:46.714375 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d3319a8_2dcb_459f_9d3d_6f1eab59ae18.slice/crio-574f15fb8d3267efde06ec409fdd1d8fdf4caecdaee4e8bc1429bd91dc089458 WatchSource:0}: Error finding container 574f15fb8d3267efde06ec409fdd1d8fdf4caecdaee4e8bc1429bd91dc089458: Status 404 returned error can't find the container with id 574f15fb8d3267efde06ec409fdd1d8fdf4caecdaee4e8bc1429bd91dc089458 Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.728700 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f5d7a90a-1755-450a-903a-016f63394e43-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-vf2l4\" (UID: \"f5d7a90a-1755-450a-903a-016f63394e43\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.728753 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5d7a90a-1755-450a-903a-016f63394e43-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-vf2l4\" (UID: \"f5d7a90a-1755-450a-903a-016f63394e43\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.728775 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxpgz\" (UniqueName: \"kubernetes.io/projected/f5d7a90a-1755-450a-903a-016f63394e43-kube-api-access-pxpgz\") pod \"nmstate-console-plugin-7fbb5f6569-vf2l4\" 
(UID: \"f5d7a90a-1755-450a-903a-016f63394e43\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.730481 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f5d7a90a-1755-450a-903a-016f63394e43-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-vf2l4\" (UID: \"f5d7a90a-1755-450a-903a-016f63394e43\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.734933 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5d7a90a-1755-450a-903a-016f63394e43-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-vf2l4\" (UID: \"f5d7a90a-1755-450a-903a-016f63394e43\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.754097 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxpgz\" (UniqueName: \"kubernetes.io/projected/f5d7a90a-1755-450a-903a-016f63394e43-kube-api-access-pxpgz\") pod \"nmstate-console-plugin-7fbb5f6569-vf2l4\" (UID: \"f5d7a90a-1755-450a-903a-016f63394e43\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.782203 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.829969 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-oauth-serving-cert\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.830415 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-trusted-ca-bundle\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.830446 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-console-oauth-config\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.830475 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9vk8\" (UniqueName: \"kubernetes.io/projected/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-kube-api-access-c9vk8\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.830789 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-console-serving-cert\") pod \"console-5595cfd85-cfpsv\" (UID: 
\"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.831004 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-service-ca\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.831031 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-console-config\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.878503 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg"] Dec 01 20:16:46 crc kubenswrapper[4852]: W1201 20:16:46.898303 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe79317d_951e_446a_9ba2_0d272c5bd48c.slice/crio-548b232535e4337bdc4b4371160f8f8e1327a1bf6945145f64a6939ed762ef3c WatchSource:0}: Error finding container 548b232535e4337bdc4b4371160f8f8e1327a1bf6945145f64a6939ed762ef3c: Status 404 returned error can't find the container with id 548b232535e4337bdc4b4371160f8f8e1327a1bf6945145f64a6939ed762ef3c Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.932330 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-oauth-serving-cert\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.932383 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-trusted-ca-bundle\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.932408 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-console-oauth-config\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.932426 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9vk8\" (UniqueName: \"kubernetes.io/projected/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-kube-api-access-c9vk8\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.932490 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-console-serving-cert\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " 
pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.932828 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-console-config\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.932864 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-service-ca\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.933611 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-service-ca\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.933787 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-oauth-serving-cert\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.935345 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-console-config\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.936376 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-trusted-ca-bundle\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.939713 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-console-oauth-config\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.939817 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-console-serving-cert\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 20:16:46.961965 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9vk8\" (UniqueName: \"kubernetes.io/projected/6722f9e1-9256-4809-9d2d-3d294c9fb9f6-kube-api-access-c9vk8\") pod \"console-5595cfd85-cfpsv\" (UID: \"6722f9e1-9256-4809-9d2d-3d294c9fb9f6\") " pod="openshift-console/console-5595cfd85-cfpsv" Dec 01 20:16:46 crc kubenswrapper[4852]: I1201 
20:16:46.997682 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4"]
Dec 01 20:16:47 crc kubenswrapper[4852]: W1201 20:16:47.003248 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5d7a90a_1755_450a_903a_016f63394e43.slice/crio-5d0c7e8fda1b3111ef7bdbd493b5d293c32b3f5682380644c06336776e7ecddd WatchSource:0}: Error finding container 5d0c7e8fda1b3111ef7bdbd493b5d293c32b3f5682380644c06336776e7ecddd: Status 404 returned error can't find the container with id 5d0c7e8fda1b3111ef7bdbd493b5d293c32b3f5682380644c06336776e7ecddd
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.035270 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4816cba9-dcc7-48c9-b4b6-a41513a2611b-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sk6kv\" (UID: \"4816cba9-dcc7-48c9-b4b6-a41513a2611b\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv"
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.039140 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4816cba9-dcc7-48c9-b4b6-a41513a2611b-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sk6kv\" (UID: \"4816cba9-dcc7-48c9-b4b6-a41513a2611b\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv"
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.039500 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5595cfd85-cfpsv"
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.264241 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv"
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.268168 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5595cfd85-cfpsv"]
Dec 01 20:16:47 crc kubenswrapper[4852]: W1201 20:16:47.275929 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6722f9e1_9256_4809_9d2d_3d294c9fb9f6.slice/crio-f2ad5d6e2cba9efd4192b22466c1f260693aa759fbd54d6f21c36194261a45e7 WatchSource:0}: Error finding container f2ad5d6e2cba9efd4192b22466c1f260693aa759fbd54d6f21c36194261a45e7: Status 404 returned error can't find the container with id f2ad5d6e2cba9efd4192b22466c1f260693aa759fbd54d6f21c36194261a45e7
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.493908 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv"]
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.499249 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-dxw88" event={"ID":"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18","Type":"ContainerStarted","Data":"574f15fb8d3267efde06ec409fdd1d8fdf4caecdaee4e8bc1429bd91dc089458"}
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.500878 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" event={"ID":"f5d7a90a-1755-450a-903a-016f63394e43","Type":"ContainerStarted","Data":"5d0c7e8fda1b3111ef7bdbd493b5d293c32b3f5682380644c06336776e7ecddd"}
Dec 01 20:16:47 crc kubenswrapper[4852]: W1201 20:16:47.502352 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4816cba9_dcc7_48c9_b4b6_a41513a2611b.slice/crio-65a95ab67d03e1176f2b49a3c96af81628d17000ab201c5a1997f0b6f2e9d37a WatchSource:0}: Error finding container 65a95ab67d03e1176f2b49a3c96af81628d17000ab201c5a1997f0b6f2e9d37a: Status 404 returned error can't find the container with id 65a95ab67d03e1176f2b49a3c96af81628d17000ab201c5a1997f0b6f2e9d37a
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.503540 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5595cfd85-cfpsv" event={"ID":"6722f9e1-9256-4809-9d2d-3d294c9fb9f6","Type":"ContainerStarted","Data":"694c615422b4c0c7e85de75cee631358f79a26197547dd598b6eeac0d82376a7"}
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.503601 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5595cfd85-cfpsv" event={"ID":"6722f9e1-9256-4809-9d2d-3d294c9fb9f6","Type":"ContainerStarted","Data":"f2ad5d6e2cba9efd4192b22466c1f260693aa759fbd54d6f21c36194261a45e7"}
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.505287 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg" event={"ID":"fe79317d-951e-446a-9ba2-0d272c5bd48c","Type":"ContainerStarted","Data":"548b232535e4337bdc4b4371160f8f8e1327a1bf6945145f64a6939ed762ef3c"}
Dec 01 20:16:47 crc kubenswrapper[4852]: I1201 20:16:47.523103 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5595cfd85-cfpsv" podStartSLOduration=1.523086043 podStartE2EDuration="1.523086043s" podCreationTimestamp="2025-12-01 20:16:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:16:47.520885805 +0000 UTC m=+727.447967222" watchObservedRunningTime="2025-12-01 20:16:47.523086043 +0000 UTC m=+727.450167460"
Dec 01 20:16:48 crc kubenswrapper[4852]: I1201 20:16:48.522590 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv" event={"ID":"4816cba9-dcc7-48c9-b4b6-a41513a2611b","Type":"ContainerStarted","Data":"65a95ab67d03e1176f2b49a3c96af81628d17000ab201c5a1997f0b6f2e9d37a"}
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.229930 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.230357 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.539924 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-dxw88" event={"ID":"0d3319a8-2dcb-459f-9d3d-6f1eab59ae18","Type":"ContainerStarted","Data":"727e5d4cefab173d58c1b37d167e52d676810ee4032752baeb523cfe159268f3"}
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.540055 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-dxw88"
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.541774 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" event={"ID":"f5d7a90a-1755-450a-903a-016f63394e43","Type":"ContainerStarted","Data":"a28c01d154ae27a797cc967e6e1a33a9f8032cf8e6d7372293cfbb9483db0e6c"}
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.544410 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv" event={"ID":"4816cba9-dcc7-48c9-b4b6-a41513a2611b","Type":"ContainerStarted","Data":"a6e320a9a73916477d74b3fc7c9fe25d0dac3fe8eb2b986e22e6819e46fd754a"}
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.544823 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv"
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.546740 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg" event={"ID":"fe79317d-951e-446a-9ba2-0d272c5bd48c","Type":"ContainerStarted","Data":"7d71429607eb0355cb23cc0bb89c1b1be9cd85f327090e98fcf9133d607c25e4"}
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.563646 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-dxw88" podStartSLOduration=1.203172977 podStartE2EDuration="4.563617491s" podCreationTimestamp="2025-12-01 20:16:46 +0000 UTC" firstStartedPulling="2025-12-01 20:16:46.716544886 +0000 UTC m=+726.643626303" lastFinishedPulling="2025-12-01 20:16:50.07698938 +0000 UTC m=+730.004070817" observedRunningTime="2025-12-01 20:16:50.554546108 +0000 UTC m=+730.481627525" watchObservedRunningTime="2025-12-01 20:16:50.563617491 +0000 UTC m=+730.490698918"
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.573900 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-vf2l4" podStartSLOduration=1.502569284 podStartE2EDuration="4.57386959s" podCreationTimestamp="2025-12-01 20:16:46 +0000 UTC" firstStartedPulling="2025-12-01 20:16:47.005629172 +0000 UTC m=+726.932710589" lastFinishedPulling="2025-12-01 20:16:50.076929478 +0000 UTC m=+730.004010895" observedRunningTime="2025-12-01 20:16:50.573817848 +0000 UTC m=+730.500899305" watchObservedRunningTime="2025-12-01 20:16:50.57386959 +0000 UTC m=+730.500951007"
Dec 01 20:16:50 crc kubenswrapper[4852]: I1201 20:16:50.608078 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv" podStartSLOduration=2.028383227 podStartE2EDuration="4.607949522s" podCreationTimestamp="2025-12-01 20:16:46 +0000 UTC" firstStartedPulling="2025-12-01 20:16:47.504352 +0000 UTC m=+727.431433417" lastFinishedPulling="2025-12-01 20:16:50.083918295 +0000 UTC m=+730.010999712" observedRunningTime="2025-12-01 20:16:50.598938541 +0000 UTC m=+730.526019958" watchObservedRunningTime="2025-12-01 20:16:50.607949522 +0000 UTC m=+730.535030969"
Dec 01 20:16:53 crc kubenswrapper[4852]: I1201 20:16:53.569221 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg" event={"ID":"fe79317d-951e-446a-9ba2-0d272c5bd48c","Type":"ContainerStarted","Data":"c734f53c5b88d2936d17b9262df3f6a7593f5e9e6d53af74eec28812b79340de"}
Dec 01 20:16:53 crc kubenswrapper[4852]: I1201 20:16:53.592361 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qdrrg" podStartSLOduration=1.6021694480000002 podStartE2EDuration="7.59233632s" podCreationTimestamp="2025-12-01 20:16:46 +0000 UTC" firstStartedPulling="2025-12-01 20:16:46.901333733 +0000 UTC m=+726.828415140" lastFinishedPulling="2025-12-01 20:16:52.891500595 +0000 UTC m=+732.818582012" observedRunningTime="2025-12-01 20:16:53.589615755 +0000 UTC m=+733.516697222" watchObservedRunningTime="2025-12-01 20:16:53.59233632 +0000 UTC m=+733.519417777"
Dec 01 20:16:56 crc kubenswrapper[4852]: I1201 20:16:56.697840 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-dxw88"
Dec 01 20:16:57 crc kubenswrapper[4852]: I1201 20:16:57.040785 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5595cfd85-cfpsv"
Dec 01 20:16:57 crc kubenswrapper[4852]: I1201 20:16:57.040849 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5595cfd85-cfpsv"
Dec 01 20:16:57 crc kubenswrapper[4852]: I1201 20:16:57.046097 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5595cfd85-cfpsv"
Dec 01 20:16:57 crc kubenswrapper[4852]: I1201 20:16:57.600273 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5595cfd85-cfpsv"
Dec 01 20:16:57 crc kubenswrapper[4852]: I1201 20:16:57.651062 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-glnwk"]
Dec 01 20:17:07 crc kubenswrapper[4852]: I1201 20:17:07.271945 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sk6kv"
Dec 01 20:17:11 crc kubenswrapper[4852]: I1201 20:17:11.484728 4852 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 01 20:17:20 crc kubenswrapper[4852]: I1201 20:17:20.229826 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 20:17:20 crc kubenswrapper[4852]: I1201 20:17:20.230481 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 20:17:20 crc kubenswrapper[4852]: I1201 20:17:20.230541 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb"
Dec 01 20:17:20 crc kubenswrapper[4852]: I1201 20:17:20.231200 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e8d8bb3a8de07d2b074b29dff0be56557f4e08cb6f204e86ee817eb0ea29ff0d"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 20:17:20 crc kubenswrapper[4852]: I1201 20:17:20.231259 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://e8d8bb3a8de07d2b074b29dff0be56557f4e08cb6f204e86ee817eb0ea29ff0d" gracePeriod=600
Dec 01 20:17:20 crc kubenswrapper[4852]: I1201 20:17:20.749050 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="e8d8bb3a8de07d2b074b29dff0be56557f4e08cb6f204e86ee817eb0ea29ff0d" exitCode=0
Dec 01 20:17:20 crc kubenswrapper[4852]: I1201 20:17:20.749127 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"e8d8bb3a8de07d2b074b29dff0be56557f4e08cb6f204e86ee817eb0ea29ff0d"}
Dec 01 20:17:20 crc kubenswrapper[4852]: I1201 20:17:20.749412 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"6f73ea9db4bc154e679740cf30c147a3e0cc18bfea0a3cba718640a8472b3f3e"}
Dec 01 20:17:20 crc kubenswrapper[4852]: I1201 20:17:20.749442 4852 scope.go:117] "RemoveContainer" containerID="6b2e8fffee4aa2c65b0402d8b297901a17afd0d5c47dea53650541ca8a73ef19"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.620350 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"]
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.621405 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.623235 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.632826 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"]
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.678938 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.679030 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnm6x\" (UniqueName: \"kubernetes.io/projected/26da957e-6f2b-4801-9186-d46cb87b1cc7-kube-api-access-hnm6x\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.679187 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.780100 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.780208 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnm6x\" (UniqueName: \"kubernetes.io/projected/26da957e-6f2b-4801-9186-d46cb87b1cc7-kube-api-access-hnm6x\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.780243 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.780789 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.780915 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.806066 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnm6x\" (UniqueName: \"kubernetes.io/projected/26da957e-6f2b-4801-9186-d46cb87b1cc7-kube-api-access-hnm6x\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:21 crc kubenswrapper[4852]: I1201 20:17:21.937822 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:22 crc kubenswrapper[4852]: I1201 20:17:22.229914 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"]
Dec 01 20:17:22 crc kubenswrapper[4852]: W1201 20:17:22.236564 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26da957e_6f2b_4801_9186_d46cb87b1cc7.slice/crio-4fd533c3f1bf4ad3163139206c4ca2227bd36e37f56fa1aeb2be3c31e2de686a WatchSource:0}: Error finding container 4fd533c3f1bf4ad3163139206c4ca2227bd36e37f56fa1aeb2be3c31e2de686a: Status 404 returned error can't find the container with id 4fd533c3f1bf4ad3163139206c4ca2227bd36e37f56fa1aeb2be3c31e2de686a
Dec 01 20:17:22 crc kubenswrapper[4852]: I1201 20:17:22.703039 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-glnwk" podUID="ab102fcf-71d9-40fc-9b9d-79b697e7864c" containerName="console" containerID="cri-o://118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698" gracePeriod=15
Dec 01 20:17:22 crc kubenswrapper[4852]: I1201 20:17:22.763612 4852 generic.go:334] "Generic (PLEG): container finished" podID="26da957e-6f2b-4801-9186-d46cb87b1cc7" containerID="b42bf344d1a5bfb60608bd64e7c2e449bba3d2ef4b43722a34dfe6734b0fb826" exitCode=0
Dec 01 20:17:22 crc kubenswrapper[4852]: I1201 20:17:22.763667 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm" event={"ID":"26da957e-6f2b-4801-9186-d46cb87b1cc7","Type":"ContainerDied","Data":"b42bf344d1a5bfb60608bd64e7c2e449bba3d2ef4b43722a34dfe6734b0fb826"}
Dec 01 20:17:22 crc kubenswrapper[4852]: I1201 20:17:22.763700 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm" event={"ID":"26da957e-6f2b-4801-9186-d46cb87b1cc7","Type":"ContainerStarted","Data":"4fd533c3f1bf4ad3163139206c4ca2227bd36e37f56fa1aeb2be3c31e2de686a"}
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.096500 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-glnwk_ab102fcf-71d9-40fc-9b9d-79b697e7864c/console/0.log"
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.096566 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-glnwk"
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.199041 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-serving-cert\") pod \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") "
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.199114 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-oauth-config\") pod \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") "
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.199152 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-oauth-serving-cert\") pod \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") "
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.199199 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bb648\" (UniqueName: \"kubernetes.io/projected/ab102fcf-71d9-40fc-9b9d-79b697e7864c-kube-api-access-bb648\") pod \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") "
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.199231 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-config\") pod \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") "
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.199642 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-trusted-ca-bundle\") pod \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") "
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.200358 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "ab102fcf-71d9-40fc-9b9d-79b697e7864c" (UID: "ab102fcf-71d9-40fc-9b9d-79b697e7864c"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.200378 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-config" (OuterVolumeSpecName: "console-config") pod "ab102fcf-71d9-40fc-9b9d-79b697e7864c" (UID: "ab102fcf-71d9-40fc-9b9d-79b697e7864c"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.200389 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-service-ca\") pod \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\" (UID: \"ab102fcf-71d9-40fc-9b9d-79b697e7864c\") "
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.200787 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-service-ca" (OuterVolumeSpecName: "service-ca") pod "ab102fcf-71d9-40fc-9b9d-79b697e7864c" (UID: "ab102fcf-71d9-40fc-9b9d-79b697e7864c"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.200810 4852 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-config\") on node \"crc\" DevicePath \"\""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.200941 4852 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.200986 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "ab102fcf-71d9-40fc-9b9d-79b697e7864c" (UID: "ab102fcf-71d9-40fc-9b9d-79b697e7864c"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.206631 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab102fcf-71d9-40fc-9b9d-79b697e7864c-kube-api-access-bb648" (OuterVolumeSpecName: "kube-api-access-bb648") pod "ab102fcf-71d9-40fc-9b9d-79b697e7864c" (UID: "ab102fcf-71d9-40fc-9b9d-79b697e7864c"). InnerVolumeSpecName "kube-api-access-bb648". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.206871 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "ab102fcf-71d9-40fc-9b9d-79b697e7864c" (UID: "ab102fcf-71d9-40fc-9b9d-79b697e7864c"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.207087 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "ab102fcf-71d9-40fc-9b9d-79b697e7864c" (UID: "ab102fcf-71d9-40fc-9b9d-79b697e7864c"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.301765 4852 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-service-ca\") on node \"crc\" DevicePath \"\""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.301807 4852 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.301820 4852 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ab102fcf-71d9-40fc-9b9d-79b697e7864c-console-oauth-config\") on node \"crc\" DevicePath \"\""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.301831 4852 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ab102fcf-71d9-40fc-9b9d-79b697e7864c-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.301844 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bb648\" (UniqueName: \"kubernetes.io/projected/ab102fcf-71d9-40fc-9b9d-79b697e7864c-kube-api-access-bb648\") on node \"crc\" DevicePath \"\""
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.773251 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-glnwk_ab102fcf-71d9-40fc-9b9d-79b697e7864c/console/0.log"
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.773840 4852 generic.go:334] "Generic (PLEG): container finished" podID="ab102fcf-71d9-40fc-9b9d-79b697e7864c" containerID="118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698" exitCode=2
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.773927 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-glnwk" event={"ID":"ab102fcf-71d9-40fc-9b9d-79b697e7864c","Type":"ContainerDied","Data":"118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698"}
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.773981 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-glnwk"
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.774124 4852 scope.go:117] "RemoveContainer" containerID="118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698"
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.774003 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-glnwk" event={"ID":"ab102fcf-71d9-40fc-9b9d-79b697e7864c","Type":"ContainerDied","Data":"572753ab8cdd25c65fdea496572b08ac646fbc60aa3a06b32e8505a56960b8cd"}
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.813000 4852 scope.go:117] "RemoveContainer" containerID="118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698"
Dec 01 20:17:23 crc kubenswrapper[4852]: E1201 20:17:23.814174 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698\": container with ID starting with 118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698 not found: ID does not exist" containerID="118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698"
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.814322 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698"} err="failed to get container status \"118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698\": rpc error: code = NotFound desc = could not find container \"118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698\": container with ID starting with 118a12348f7b066c77bd9de830b3ac81d9c7dac3e6776cf7e82c4dda9e246698 not found: ID does not exist"
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.828226 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-glnwk"]
Dec 01 20:17:23 crc kubenswrapper[4852]: I1201 20:17:23.836048 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-glnwk"]
Dec 01 20:17:24 crc kubenswrapper[4852]: I1201 20:17:24.327144 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab102fcf-71d9-40fc-9b9d-79b697e7864c" path="/var/lib/kubelet/pods/ab102fcf-71d9-40fc-9b9d-79b697e7864c/volumes"
Dec 01 20:17:24 crc kubenswrapper[4852]: I1201 20:17:24.789512 4852 generic.go:334] "Generic (PLEG): container finished" podID="26da957e-6f2b-4801-9186-d46cb87b1cc7" containerID="95f50134713c66d0532b4b9b93fcac74cbba90a4789745c134adfe5a5ee2b364" exitCode=0
Dec 01 20:17:24 crc kubenswrapper[4852]: I1201 20:17:24.789621 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm" event={"ID":"26da957e-6f2b-4801-9186-d46cb87b1cc7","Type":"ContainerDied","Data":"95f50134713c66d0532b4b9b93fcac74cbba90a4789745c134adfe5a5ee2b364"}
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.179378 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bcxg2"]
Dec 01 20:17:25 crc kubenswrapper[4852]: E1201 20:17:25.179683 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab102fcf-71d9-40fc-9b9d-79b697e7864c" containerName="console"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.179696 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab102fcf-71d9-40fc-9b9d-79b697e7864c" containerName="console"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.179810 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab102fcf-71d9-40fc-9b9d-79b697e7864c" containerName="console"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.180729 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.193215 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bcxg2"]
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.232508 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-utilities\") pod \"redhat-operators-bcxg2\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") " pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.232628 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc9ds\" (UniqueName: \"kubernetes.io/projected/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-kube-api-access-wc9ds\") pod \"redhat-operators-bcxg2\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") " pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.232662 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-catalog-content\") pod \"redhat-operators-bcxg2\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") " pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.333727 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-utilities\") pod \"redhat-operators-bcxg2\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") " pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.333832 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc9ds\" (UniqueName: \"kubernetes.io/projected/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-kube-api-access-wc9ds\") pod \"redhat-operators-bcxg2\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") " pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.333865 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-catalog-content\") pod \"redhat-operators-bcxg2\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") " pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.334266 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-utilities\") pod \"redhat-operators-bcxg2\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") " pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.334300 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-catalog-content\") pod \"redhat-operators-bcxg2\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") " pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.356393 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc9ds\" (UniqueName: \"kubernetes.io/projected/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-kube-api-access-wc9ds\") pod \"redhat-operators-bcxg2\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") " pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.494427 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.717611 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bcxg2"]
Dec 01 20:17:25 crc kubenswrapper[4852]: W1201 20:17:25.726701 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19baac0d_eee6_4431_960e_9b0b4b0a6cdc.slice/crio-4e57bf6ddfb22fc09ef49177a344f53c83f9d2c2c3c88a5f29b800bfdbb9d0a6 WatchSource:0}: Error finding container 4e57bf6ddfb22fc09ef49177a344f53c83f9d2c2c3c88a5f29b800bfdbb9d0a6: Status 404 returned error can't find the container with id 4e57bf6ddfb22fc09ef49177a344f53c83f9d2c2c3c88a5f29b800bfdbb9d0a6
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.799130 4852 generic.go:334] "Generic (PLEG): container finished" podID="26da957e-6f2b-4801-9186-d46cb87b1cc7" containerID="c48745b729dfe5c6f86f17da38885b9e88bbe8babed891c0d6c9f977e701351c" exitCode=0
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.799314 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm" event={"ID":"26da957e-6f2b-4801-9186-d46cb87b1cc7","Type":"ContainerDied","Data":"c48745b729dfe5c6f86f17da38885b9e88bbe8babed891c0d6c9f977e701351c"}
Dec 01 20:17:25 crc kubenswrapper[4852]: I1201 20:17:25.800486 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcxg2" event={"ID":"19baac0d-eee6-4431-960e-9b0b4b0a6cdc","Type":"ContainerStarted","Data":"4e57bf6ddfb22fc09ef49177a344f53c83f9d2c2c3c88a5f29b800bfdbb9d0a6"}
Dec 01 20:17:26 crc kubenswrapper[4852]: I1201 20:17:26.807540 4852 generic.go:334] "Generic (PLEG): container finished" podID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerID="2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727" exitCode=0
Dec 01 20:17:26 crc kubenswrapper[4852]: I1201 20:17:26.807630 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcxg2" event={"ID":"19baac0d-eee6-4431-960e-9b0b4b0a6cdc","Type":"ContainerDied","Data":"2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727"}
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.042082 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.058003 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-util\") pod \"26da957e-6f2b-4801-9186-d46cb87b1cc7\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") "
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.058126 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-bundle\") pod \"26da957e-6f2b-4801-9186-d46cb87b1cc7\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") "
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.058157 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnm6x\" (UniqueName: \"kubernetes.io/projected/26da957e-6f2b-4801-9186-d46cb87b1cc7-kube-api-access-hnm6x\") pod \"26da957e-6f2b-4801-9186-d46cb87b1cc7\" (UID: \"26da957e-6f2b-4801-9186-d46cb87b1cc7\") "
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.059483 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-bundle" (OuterVolumeSpecName: "bundle") pod "26da957e-6f2b-4801-9186-d46cb87b1cc7" (UID: "26da957e-6f2b-4801-9186-d46cb87b1cc7"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.070656 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26da957e-6f2b-4801-9186-d46cb87b1cc7-kube-api-access-hnm6x" (OuterVolumeSpecName: "kube-api-access-hnm6x") pod "26da957e-6f2b-4801-9186-d46cb87b1cc7" (UID: "26da957e-6f2b-4801-9186-d46cb87b1cc7"). InnerVolumeSpecName "kube-api-access-hnm6x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.080342 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-util" (OuterVolumeSpecName: "util") pod "26da957e-6f2b-4801-9186-d46cb87b1cc7" (UID: "26da957e-6f2b-4801-9186-d46cb87b1cc7"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.160031 4852 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-util\") on node \"crc\" DevicePath \"\""
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.160299 4852 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/26da957e-6f2b-4801-9186-d46cb87b1cc7-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.160374 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnm6x\" (UniqueName: \"kubernetes.io/projected/26da957e-6f2b-4801-9186-d46cb87b1cc7-kube-api-access-hnm6x\") on node \"crc\" DevicePath \"\""
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.815735 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm" event={"ID":"26da957e-6f2b-4801-9186-d46cb87b1cc7","Type":"ContainerDied","Data":"4fd533c3f1bf4ad3163139206c4ca2227bd36e37f56fa1aeb2be3c31e2de686a"}
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.815778 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4fd533c3f1bf4ad3163139206c4ca2227bd36e37f56fa1aeb2be3c31e2de686a"
Dec 01 20:17:27 crc kubenswrapper[4852]: I1201 20:17:27.816711 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm"
Dec 01 20:17:28 crc kubenswrapper[4852]: I1201 20:17:28.823850 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcxg2" event={"ID":"19baac0d-eee6-4431-960e-9b0b4b0a6cdc","Type":"ContainerStarted","Data":"9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e"}
Dec 01 20:17:29 crc kubenswrapper[4852]: I1201 20:17:29.830407 4852 generic.go:334] "Generic (PLEG): container finished" podID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerID="9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e" exitCode=0
Dec 01 20:17:29 crc kubenswrapper[4852]: I1201 20:17:29.830470 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcxg2" event={"ID":"19baac0d-eee6-4431-960e-9b0b4b0a6cdc","Type":"ContainerDied","Data":"9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e"}
Dec 01 20:17:30 crc kubenswrapper[4852]: I1201 20:17:30.838630 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcxg2" event={"ID":"19baac0d-eee6-4431-960e-9b0b4b0a6cdc","Type":"ContainerStarted","Data":"17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff"}
Dec 01 20:17:30 crc kubenswrapper[4852]: I1201 20:17:30.859788 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bcxg2" podStartSLOduration=2.174237763 podStartE2EDuration="5.859762305s" podCreationTimestamp="2025-12-01 20:17:25 +0000 UTC" firstStartedPulling="2025-12-01 20:17:26.809256752 +0000 UTC m=+766.736338169" lastFinishedPulling="2025-12-01 20:17:30.494781294 +0000 UTC m=+770.421862711" observedRunningTime="2025-12-01 20:17:30.854917635 +0000 UTC m=+770.781999062" watchObservedRunningTime="2025-12-01 20:17:30.859762305 +0000 UTC m=+770.786843732"
Dec 01 20:17:35 crc kubenswrapper[4852]: I1201 20:17:35.494628 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:35 crc kubenswrapper[4852]: I1201 20:17:35.495748 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:36 crc kubenswrapper[4852]: I1201 20:17:36.560257 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bcxg2" podUID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerName="registry-server" probeResult="failure" output=<
Dec 01 20:17:36 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s
Dec 01 20:17:36 crc kubenswrapper[4852]: >
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.161948 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"]
Dec 01 20:17:37 crc kubenswrapper[4852]: E1201 20:17:37.162286 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26da957e-6f2b-4801-9186-d46cb87b1cc7" containerName="pull"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.162308 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="26da957e-6f2b-4801-9186-d46cb87b1cc7" containerName="pull"
Dec 01 20:17:37 crc kubenswrapper[4852]: E1201 20:17:37.162322 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26da957e-6f2b-4801-9186-d46cb87b1cc7" containerName="util"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.162330 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="26da957e-6f2b-4801-9186-d46cb87b1cc7" containerName="util"
Dec 01 20:17:37 crc kubenswrapper[4852]: E1201 20:17:37.162359 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26da957e-6f2b-4801-9186-d46cb87b1cc7" containerName="extract"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.162366 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="26da957e-6f2b-4801-9186-d46cb87b1cc7" containerName="extract"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.162540 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="26da957e-6f2b-4801-9186-d46cb87b1cc7" containerName="extract"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.163208 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.166690 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.166988 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.167184 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.167751 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.168018 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-2qb5c"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.196959 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"]
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.206523 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/573029ff-5b2f-408d-aa44-da5d6ab202c0-webhook-cert\") pod \"metallb-operator-controller-manager-6ddd45494c-nkr5p\" (UID: \"573029ff-5b2f-408d-aa44-da5d6ab202c0\") " pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.206591 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/573029ff-5b2f-408d-aa44-da5d6ab202c0-apiservice-cert\") pod \"metallb-operator-controller-manager-6ddd45494c-nkr5p\" (UID: \"573029ff-5b2f-408d-aa44-da5d6ab202c0\") " pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.206617 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsghq\" (UniqueName: \"kubernetes.io/projected/573029ff-5b2f-408d-aa44-da5d6ab202c0-kube-api-access-wsghq\") pod \"metallb-operator-controller-manager-6ddd45494c-nkr5p\" (UID: \"573029ff-5b2f-408d-aa44-da5d6ab202c0\") " pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.308916 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/573029ff-5b2f-408d-aa44-da5d6ab202c0-apiservice-cert\") pod \"metallb-operator-controller-manager-6ddd45494c-nkr5p\" (UID: \"573029ff-5b2f-408d-aa44-da5d6ab202c0\") " pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.308979 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsghq\" (UniqueName: \"kubernetes.io/projected/573029ff-5b2f-408d-aa44-da5d6ab202c0-kube-api-access-wsghq\") pod \"metallb-operator-controller-manager-6ddd45494c-nkr5p\" (UID: \"573029ff-5b2f-408d-aa44-da5d6ab202c0\") " pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.309049 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/573029ff-5b2f-408d-aa44-da5d6ab202c0-webhook-cert\") pod \"metallb-operator-controller-manager-6ddd45494c-nkr5p\" (UID: \"573029ff-5b2f-408d-aa44-da5d6ab202c0\") " pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.318842 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/573029ff-5b2f-408d-aa44-da5d6ab202c0-webhook-cert\") pod \"metallb-operator-controller-manager-6ddd45494c-nkr5p\" (UID: \"573029ff-5b2f-408d-aa44-da5d6ab202c0\") " pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.329544 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/573029ff-5b2f-408d-aa44-da5d6ab202c0-apiservice-cert\") pod \"metallb-operator-controller-manager-6ddd45494c-nkr5p\" (UID: \"573029ff-5b2f-408d-aa44-da5d6ab202c0\") " pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.340289 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsghq\" (UniqueName: \"kubernetes.io/projected/573029ff-5b2f-408d-aa44-da5d6ab202c0-kube-api-access-wsghq\") pod \"metallb-operator-controller-manager-6ddd45494c-nkr5p\" (UID: \"573029ff-5b2f-408d-aa44-da5d6ab202c0\") " pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.476007 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"]
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.477179 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.479835 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.480442 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.480801 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-sfxc7"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.485964 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.501588 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"]
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.512206 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e-webhook-cert\") pod \"metallb-operator-webhook-server-58df767f78-7ndmk\" (UID: \"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e\") " pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.512285 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnhg4\" (UniqueName: \"kubernetes.io/projected/13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e-kube-api-access-qnhg4\") pod \"metallb-operator-webhook-server-58df767f78-7ndmk\" (UID: \"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e\") " pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.512337 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e-apiservice-cert\") pod \"metallb-operator-webhook-server-58df767f78-7ndmk\" (UID: \"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e\") " pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.614290 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnhg4\" (UniqueName: \"kubernetes.io/projected/13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e-kube-api-access-qnhg4\") pod \"metallb-operator-webhook-server-58df767f78-7ndmk\" (UID: \"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e\") " pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.614364 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e-apiservice-cert\") pod \"metallb-operator-webhook-server-58df767f78-7ndmk\" (UID: \"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e\") " pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.614425 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e-webhook-cert\") pod \"metallb-operator-webhook-server-58df767f78-7ndmk\" (UID: \"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e\") " pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.620184 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e-webhook-cert\") pod \"metallb-operator-webhook-server-58df767f78-7ndmk\" (UID: \"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e\") " pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.624484 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e-apiservice-cert\") pod \"metallb-operator-webhook-server-58df767f78-7ndmk\" (UID: \"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e\") " pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.651217 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnhg4\" (UniqueName: \"kubernetes.io/projected/13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e-kube-api-access-qnhg4\") pod \"metallb-operator-webhook-server-58df767f78-7ndmk\" (UID: \"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e\") " pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.795814 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:37 crc kubenswrapper[4852]: I1201 20:17:37.952304 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"]
Dec 01 20:17:38 crc kubenswrapper[4852]: I1201 20:17:38.214892 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"]
Dec 01 20:17:38 crc kubenswrapper[4852]: W1201 20:17:38.225556 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13a3d5e7_f415_49e6_ac9b_6b9a3ab2027e.slice/crio-416c035d4bc254e43dccdccf68c1d84b5d1ac2e288fab5c343fa9b7fe0db5336 WatchSource:0}: Error finding container 416c035d4bc254e43dccdccf68c1d84b5d1ac2e288fab5c343fa9b7fe0db5336: Status 404 returned error can't find the container with id 416c035d4bc254e43dccdccf68c1d84b5d1ac2e288fab5c343fa9b7fe0db5336
Dec 01 20:17:38 crc kubenswrapper[4852]: I1201 20:17:38.892230 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p" event={"ID":"573029ff-5b2f-408d-aa44-da5d6ab202c0","Type":"ContainerStarted","Data":"68d7f9746a57267bc3ad86c5d2d859436e8423e528b55e81bade2dcbc717b3ae"}
Dec 01 20:17:38 crc kubenswrapper[4852]: I1201 20:17:38.894389 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk" event={"ID":"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e","Type":"ContainerStarted","Data":"416c035d4bc254e43dccdccf68c1d84b5d1ac2e288fab5c343fa9b7fe0db5336"}
Dec 01 20:17:45 crc kubenswrapper[4852]: I1201 20:17:45.563683 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:45 crc kubenswrapper[4852]: I1201 20:17:45.643636 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:45 crc kubenswrapper[4852]: I1201 20:17:45.811529 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bcxg2"]
Dec 01 20:17:45 crc kubenswrapper[4852]: I1201 20:17:45.994064 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk" event={"ID":"13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e","Type":"ContainerStarted","Data":"7fd3aaf0fde0c725eb571b9a3d171e2293fe142d06525fcc6079687a67fcd36b"}
Dec 01 20:17:45 crc kubenswrapper[4852]: I1201 20:17:45.994556 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk"
Dec 01 20:17:45 crc kubenswrapper[4852]: I1201 20:17:45.996881 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p" event={"ID":"573029ff-5b2f-408d-aa44-da5d6ab202c0","Type":"ContainerStarted","Data":"10c3cfa11eacd37dba544752eca026850fde82a285421fbc1494f31ec4d82355"}
Dec 01 20:17:46 crc kubenswrapper[4852]: I1201 20:17:46.015340 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk" podStartSLOduration=1.749653044 podStartE2EDuration="9.015315864s" podCreationTimestamp="2025-12-01 20:17:37 +0000 UTC" firstStartedPulling="2025-12-01 20:17:38.24431755 +0000 UTC m=+778.171398967" lastFinishedPulling="2025-12-01 20:17:45.50998037 +0000 UTC m=+785.437061787" observedRunningTime="2025-12-01 20:17:46.014092116 +0000 UTC m=+785.941173553" watchObservedRunningTime="2025-12-01 20:17:46.015315864 +0000 UTC m=+785.942397281"
Dec 01 20:17:46 crc kubenswrapper[4852]: I1201 20:17:46.041352 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p" podStartSLOduration=1.517284583 podStartE2EDuration="9.041326834s" podCreationTimestamp="2025-12-01 20:17:37 +0000 UTC" firstStartedPulling="2025-12-01 20:17:37.967591608 +0000 UTC m=+777.894673025" lastFinishedPulling="2025-12-01 20:17:45.491633859 +0000 UTC m=+785.418715276" observedRunningTime="2025-12-01 20:17:46.039556979 +0000 UTC m=+785.966638406" watchObservedRunningTime="2025-12-01 20:17:46.041326834 +0000 UTC m=+785.968408251"
Dec 01 20:17:47 crc kubenswrapper[4852]: I1201 20:17:47.003239 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p"
Dec 01 20:17:47 crc kubenswrapper[4852]: I1201 20:17:47.003447 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bcxg2" podUID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerName="registry-server" containerID="cri-o://17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff" gracePeriod=2
Dec 01 20:17:47 crc kubenswrapper[4852]: I1201 20:17:47.968584 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.067723 4852 generic.go:334] "Generic (PLEG): container finished" podID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerID="17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff" exitCode=0
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.068566 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bcxg2"
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.068959 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcxg2" event={"ID":"19baac0d-eee6-4431-960e-9b0b4b0a6cdc","Type":"ContainerDied","Data":"17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff"}
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.068989 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcxg2" event={"ID":"19baac0d-eee6-4431-960e-9b0b4b0a6cdc","Type":"ContainerDied","Data":"4e57bf6ddfb22fc09ef49177a344f53c83f9d2c2c3c88a5f29b800bfdbb9d0a6"}
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.069007 4852 scope.go:117] "RemoveContainer" containerID="17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff"
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.097749 4852 scope.go:117] "RemoveContainer" containerID="9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e"
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.109108 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-catalog-content\") pod \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") "
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.109163 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc9ds\" (UniqueName: \"kubernetes.io/projected/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-kube-api-access-wc9ds\") pod \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") "
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.109243 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-utilities\") pod \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\" (UID: \"19baac0d-eee6-4431-960e-9b0b4b0a6cdc\") "
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.110134 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-utilities" (OuterVolumeSpecName: "utilities") pod "19baac0d-eee6-4431-960e-9b0b4b0a6cdc" (UID: "19baac0d-eee6-4431-960e-9b0b4b0a6cdc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.114541 4852 scope.go:117] "RemoveContainer" containerID="2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727"
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.117799 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-kube-api-access-wc9ds" (OuterVolumeSpecName: "kube-api-access-wc9ds") pod "19baac0d-eee6-4431-960e-9b0b4b0a6cdc" (UID: "19baac0d-eee6-4431-960e-9b0b4b0a6cdc"). InnerVolumeSpecName "kube-api-access-wc9ds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.157354 4852 scope.go:117] "RemoveContainer" containerID="17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff"
Dec 01 20:17:48 crc kubenswrapper[4852]: E1201 20:17:48.157857 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff\": container with ID starting with 17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff not found: ID does not exist" containerID="17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff"
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.157905 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff"} err="failed to get container status \"17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff\": rpc error: code = NotFound desc = could not find container \"17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff\": container with ID starting with 17afd69c54e9dd1b5d2bf44630b13dd853892d376269186837715d23760fddff not found: ID does not exist"
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.157929 4852 scope.go:117] "RemoveContainer" containerID="9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e"
Dec 01 20:17:48 crc kubenswrapper[4852]: E1201 20:17:48.158361 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e\": container with ID starting with 9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e not found: ID does not exist" containerID="9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e"
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.158409 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e"} err="failed to get container status \"9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e\": rpc error: code = NotFound desc = could not find container \"9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e\": container with ID starting with 9c4dcbee0ef4858606881e94a224c78f6f99f80e66d1fb4a0a21284039211c9e not found: ID does not exist"
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.158442 4852 scope.go:117] "RemoveContainer" containerID="2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727"
Dec 01 20:17:48 crc kubenswrapper[4852]: E1201 20:17:48.158772 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727\": container with ID starting with 2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727 not found: ID does not exist" containerID="2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727"
Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.158795 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727"} err="failed to get container status \"2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727\": rpc error: code = NotFound desc = could not
find container \"2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727\": container with ID starting with 2370baac5be4e0cbdf93b2429b63c5bf3c79239871a2ccda6f1958722c120727 not found: ID does not exist" Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.213889 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "19baac0d-eee6-4431-960e-9b0b4b0a6cdc" (UID: "19baac0d-eee6-4431-960e-9b0b4b0a6cdc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.215240 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.215276 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.215291 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc9ds\" (UniqueName: \"kubernetes.io/projected/19baac0d-eee6-4431-960e-9b0b4b0a6cdc-kube-api-access-wc9ds\") on node \"crc\" DevicePath \"\"" Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.391735 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bcxg2"] Dec 01 20:17:48 crc kubenswrapper[4852]: I1201 20:17:48.398599 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bcxg2"] Dec 01 20:17:50 crc kubenswrapper[4852]: I1201 20:17:50.331152 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" path="/var/lib/kubelet/pods/19baac0d-eee6-4431-960e-9b0b4b0a6cdc/volumes" Dec 01 20:17:57 crc kubenswrapper[4852]: I1201 20:17:57.803582 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-58df767f78-7ndmk" Dec 01 20:18:17 crc kubenswrapper[4852]: I1201 20:18:17.488781 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6ddd45494c-nkr5p" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.231992 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc"] Dec 01 20:18:18 crc kubenswrapper[4852]: E1201 20:18:18.232923 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerName="extract-content" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.232947 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerName="extract-content" Dec 01 20:18:18 crc kubenswrapper[4852]: E1201 20:18:18.232961 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerName="extract-utilities" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.232968 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerName="extract-utilities" Dec 01 20:18:18 crc kubenswrapper[4852]: E1201 20:18:18.232980 4852 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerName="registry-server" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.232987 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerName="registry-server" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.233137 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="19baac0d-eee6-4431-960e-9b0b4b0a6cdc" containerName="registry-server" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.233697 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.236247 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.237654 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-lbhk5" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.245165 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-dhxv4"] Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.248038 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.250302 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc"] Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.250609 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.251341 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.330261 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-bs8kz"] Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.331320 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.334706 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.334793 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.335739 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-sbh9f" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.336368 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.347662 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-62hbm"] Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.349116 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.351201 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.359586 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8f00dfd9-29e9-420b-925f-8e875da31ee8-frr-startup\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.359643 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7b0c5529-861a-4fa1-82f7-72c2463171ee-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-vwpnc\" (UID: \"7b0c5529-861a-4fa1-82f7-72c2463171ee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.359676 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-reloader\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.359704 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-frr-sockets\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.360000 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgf4w\" (UniqueName: \"kubernetes.io/projected/7b0c5529-861a-4fa1-82f7-72c2463171ee-kube-api-access-fgf4w\") pod \"frr-k8s-webhook-server-7fcb986d4-vwpnc\" (UID: \"7b0c5529-861a-4fa1-82f7-72c2463171ee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.360078 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f00dfd9-29e9-420b-925f-8e875da31ee8-metrics-certs\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.360269 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-metrics\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.360308 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7brfq\" (UniqueName: \"kubernetes.io/projected/8f00dfd9-29e9-420b-925f-8e875da31ee8-kube-api-access-7brfq\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.360434 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" 
(UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-frr-conf\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.376904 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-62hbm"] Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462145 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-frr-conf\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462262 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-metrics-certs\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462342 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkdhz\" (UniqueName: \"kubernetes.io/projected/487029c0-a6d2-4f9a-a9d1-d819b22d1279-kube-api-access-zkdhz\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462391 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462444 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8f00dfd9-29e9-420b-925f-8e875da31ee8-frr-startup\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462502 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7b0c5529-861a-4fa1-82f7-72c2463171ee-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-vwpnc\" (UID: \"7b0c5529-861a-4fa1-82f7-72c2463171ee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462554 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/487029c0-a6d2-4f9a-a9d1-d819b22d1279-metallb-excludel2\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462588 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-reloader\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462656 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: 
\"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-frr-sockets\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462731 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgf4w\" (UniqueName: \"kubernetes.io/projected/7b0c5529-861a-4fa1-82f7-72c2463171ee-kube-api-access-fgf4w\") pod \"frr-k8s-webhook-server-7fcb986d4-vwpnc\" (UID: \"7b0c5529-861a-4fa1-82f7-72c2463171ee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462765 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csfjc\" (UniqueName: \"kubernetes.io/projected/29e50547-faa6-4d14-adee-5ea9e0264a42-kube-api-access-csfjc\") pod \"controller-f8648f98b-62hbm\" (UID: \"29e50547-faa6-4d14-adee-5ea9e0264a42\") " pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462816 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f00dfd9-29e9-420b-925f-8e875da31ee8-metrics-certs\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462878 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29e50547-faa6-4d14-adee-5ea9e0264a42-cert\") pod \"controller-f8648f98b-62hbm\" (UID: \"29e50547-faa6-4d14-adee-5ea9e0264a42\") " pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462942 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-frr-conf\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.462935 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7brfq\" (UniqueName: \"kubernetes.io/projected/8f00dfd9-29e9-420b-925f-8e875da31ee8-kube-api-access-7brfq\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.463058 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-metrics\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.463132 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-frr-sockets\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.463140 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/29e50547-faa6-4d14-adee-5ea9e0264a42-metrics-certs\") pod \"controller-f8648f98b-62hbm\" (UID: 
\"29e50547-faa6-4d14-adee-5ea9e0264a42\") " pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.463365 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-reloader\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.463403 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8f00dfd9-29e9-420b-925f-8e875da31ee8-metrics\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.463662 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8f00dfd9-29e9-420b-925f-8e875da31ee8-frr-startup\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.470329 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7b0c5529-861a-4fa1-82f7-72c2463171ee-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-vwpnc\" (UID: \"7b0c5529-861a-4fa1-82f7-72c2463171ee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.472269 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f00dfd9-29e9-420b-925f-8e875da31ee8-metrics-certs\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.500420 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgf4w\" (UniqueName: \"kubernetes.io/projected/7b0c5529-861a-4fa1-82f7-72c2463171ee-kube-api-access-fgf4w\") pod \"frr-k8s-webhook-server-7fcb986d4-vwpnc\" (UID: \"7b0c5529-861a-4fa1-82f7-72c2463171ee\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.509362 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7brfq\" (UniqueName: \"kubernetes.io/projected/8f00dfd9-29e9-420b-925f-8e875da31ee8-kube-api-access-7brfq\") pod \"frr-k8s-dhxv4\" (UID: \"8f00dfd9-29e9-420b-925f-8e875da31ee8\") " pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.551997 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.564502 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.564563 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/487029c0-a6d2-4f9a-a9d1-d819b22d1279-metallb-excludel2\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.564596 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csfjc\" (UniqueName: \"kubernetes.io/projected/29e50547-faa6-4d14-adee-5ea9e0264a42-kube-api-access-csfjc\") pod \"controller-f8648f98b-62hbm\" (UID: \"29e50547-faa6-4d14-adee-5ea9e0264a42\") " pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.564625 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29e50547-faa6-4d14-adee-5ea9e0264a42-cert\") pod \"controller-f8648f98b-62hbm\" (UID: \"29e50547-faa6-4d14-adee-5ea9e0264a42\") " pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.564658 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/29e50547-faa6-4d14-adee-5ea9e0264a42-metrics-certs\") pod \"controller-f8648f98b-62hbm\" (UID: \"29e50547-faa6-4d14-adee-5ea9e0264a42\") " pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.564683 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-metrics-certs\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.564704 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkdhz\" (UniqueName: \"kubernetes.io/projected/487029c0-a6d2-4f9a-a9d1-d819b22d1279-kube-api-access-zkdhz\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: E1201 20:18:18.564748 4852 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 20:18:18 crc kubenswrapper[4852]: E1201 20:18:18.564901 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist podName:487029c0-a6d2-4f9a-a9d1-d819b22d1279 nodeName:}" failed. No retries permitted until 2025-12-01 20:18:19.064869774 +0000 UTC m=+818.991951351 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist") pod "speaker-bs8kz" (UID: "487029c0-a6d2-4f9a-a9d1-d819b22d1279") : secret "metallb-memberlist" not found Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.565778 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/487029c0-a6d2-4f9a-a9d1-d819b22d1279-metallb-excludel2\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: E1201 20:18:18.565861 4852 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 01 20:18:18 crc kubenswrapper[4852]: E1201 20:18:18.565910 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-metrics-certs podName:487029c0-a6d2-4f9a-a9d1-d819b22d1279 nodeName:}" failed. No retries permitted until 2025-12-01 20:18:19.065892926 +0000 UTC m=+818.992974343 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-metrics-certs") pod "speaker-bs8kz" (UID: "487029c0-a6d2-4f9a-a9d1-d819b22d1279") : secret "speaker-certs-secret" not found Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.570410 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/29e50547-faa6-4d14-adee-5ea9e0264a42-metrics-certs\") pod \"controller-f8648f98b-62hbm\" (UID: \"29e50547-faa6-4d14-adee-5ea9e0264a42\") " pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.576024 4852 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.576279 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.590490 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkdhz\" (UniqueName: \"kubernetes.io/projected/487029c0-a6d2-4f9a-a9d1-d819b22d1279-kube-api-access-zkdhz\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.597301 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29e50547-faa6-4d14-adee-5ea9e0264a42-cert\") pod \"controller-f8648f98b-62hbm\" (UID: \"29e50547-faa6-4d14-adee-5ea9e0264a42\") " pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.638987 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csfjc\" (UniqueName: \"kubernetes.io/projected/29e50547-faa6-4d14-adee-5ea9e0264a42-kube-api-access-csfjc\") pod \"controller-f8648f98b-62hbm\" (UID: \"29e50547-faa6-4d14-adee-5ea9e0264a42\") " pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.668252 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.877626 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc"] Dec 01 20:18:18 crc kubenswrapper[4852]: W1201 20:18:18.886153 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b0c5529_861a_4fa1_82f7_72c2463171ee.slice/crio-ec9806569618a49ad851739ba835ecbe414b691c7d7bbb142bda98bfcb8a5da7 WatchSource:0}: Error finding container ec9806569618a49ad851739ba835ecbe414b691c7d7bbb142bda98bfcb8a5da7: Status 404 returned error can't find the container with id ec9806569618a49ad851739ba835ecbe414b691c7d7bbb142bda98bfcb8a5da7 Dec 01 20:18:18 crc kubenswrapper[4852]: I1201 20:18:18.957213 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-62hbm"] Dec 01 20:18:18 crc kubenswrapper[4852]: W1201 20:18:18.964818 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29e50547_faa6_4d14_adee_5ea9e0264a42.slice/crio-06f68c33aeed1f689cd6d79922a27b8d0918befc0ab6c1025faffcf77aa1041f WatchSource:0}: Error finding container 06f68c33aeed1f689cd6d79922a27b8d0918befc0ab6c1025faffcf77aa1041f: Status 404 returned error can't find the container with id 06f68c33aeed1f689cd6d79922a27b8d0918befc0ab6c1025faffcf77aa1041f Dec 01 20:18:19 crc kubenswrapper[4852]: I1201 20:18:19.072045 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-metrics-certs\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:19 crc kubenswrapper[4852]: I1201 20:18:19.072135 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:19 crc kubenswrapper[4852]: E1201 20:18:19.072254 4852 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 20:18:19 crc kubenswrapper[4852]: E1201 20:18:19.072329 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist podName:487029c0-a6d2-4f9a-a9d1-d819b22d1279 nodeName:}" failed. No retries permitted until 2025-12-01 20:18:20.072310144 +0000 UTC m=+819.999391561 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist") pod "speaker-bs8kz" (UID: "487029c0-a6d2-4f9a-a9d1-d819b22d1279") : secret "metallb-memberlist" not found Dec 01 20:18:19 crc kubenswrapper[4852]: I1201 20:18:19.081638 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-metrics-certs\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:19 crc kubenswrapper[4852]: I1201 20:18:19.267680 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-62hbm" event={"ID":"29e50547-faa6-4d14-adee-5ea9e0264a42","Type":"ContainerStarted","Data":"06f68c33aeed1f689cd6d79922a27b8d0918befc0ab6c1025faffcf77aa1041f"} Dec 01 20:18:19 crc kubenswrapper[4852]: I1201 20:18:19.269099 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" event={"ID":"7b0c5529-861a-4fa1-82f7-72c2463171ee","Type":"ContainerStarted","Data":"ec9806569618a49ad851739ba835ecbe414b691c7d7bbb142bda98bfcb8a5da7"} Dec 01 20:18:20 crc kubenswrapper[4852]: I1201 20:18:20.086432 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:20 crc kubenswrapper[4852]: E1201 20:18:20.086640 4852 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 20:18:20 crc kubenswrapper[4852]: E1201 20:18:20.087077 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist podName:487029c0-a6d2-4f9a-a9d1-d819b22d1279 nodeName:}" failed. No retries permitted until 2025-12-01 20:18:22.087051317 +0000 UTC m=+822.014132754 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist") pod "speaker-bs8kz" (UID: "487029c0-a6d2-4f9a-a9d1-d819b22d1279") : secret "metallb-memberlist" not found Dec 01 20:18:20 crc kubenswrapper[4852]: I1201 20:18:20.279157 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dhxv4" event={"ID":"8f00dfd9-29e9-420b-925f-8e875da31ee8","Type":"ContainerStarted","Data":"b8804d94d582b7fdb8edc7b315221eee0aad687d5f5b4c46421bea3313967e4b"} Dec 01 20:18:20 crc kubenswrapper[4852]: I1201 20:18:20.281537 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-62hbm" event={"ID":"29e50547-faa6-4d14-adee-5ea9e0264a42","Type":"ContainerStarted","Data":"9725505f9c5a31d561fef4000d53943d7f3e93709e3aef56de68ec82af1978d8"} Dec 01 20:18:21 crc kubenswrapper[4852]: I1201 20:18:21.290119 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-62hbm" event={"ID":"29e50547-faa6-4d14-adee-5ea9e0264a42","Type":"ContainerStarted","Data":"64f5b402b992f552bb778f902bb71cc3962f1a1aedf949e9b5f7c32f38fcd376"} Dec 01 20:18:21 crc kubenswrapper[4852]: I1201 20:18:21.290340 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:21 crc kubenswrapper[4852]: I1201 20:18:21.317713 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-62hbm" podStartSLOduration=3.317691158 podStartE2EDuration="3.317691158s" podCreationTimestamp="2025-12-01 20:18:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:18:21.312219508 +0000 UTC m=+821.239300935" watchObservedRunningTime="2025-12-01 20:18:21.317691158 +0000 UTC m=+821.244772585" Dec 01 20:18:22 crc kubenswrapper[4852]: I1201 20:18:22.118539 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:22 crc kubenswrapper[4852]: I1201 20:18:22.129781 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/487029c0-a6d2-4f9a-a9d1-d819b22d1279-memberlist\") pod \"speaker-bs8kz\" (UID: \"487029c0-a6d2-4f9a-a9d1-d819b22d1279\") " pod="metallb-system/speaker-bs8kz" Dec 01 20:18:22 crc kubenswrapper[4852]: I1201 20:18:22.248509 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-bs8kz" Dec 01 20:18:23 crc kubenswrapper[4852]: I1201 20:18:23.323166 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bs8kz" event={"ID":"487029c0-a6d2-4f9a-a9d1-d819b22d1279","Type":"ContainerStarted","Data":"b22fd478e2714a7946c00fec26fcd46085ba2ab5b597b93444be2bfd83186b1a"} Dec 01 20:18:23 crc kubenswrapper[4852]: I1201 20:18:23.323646 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bs8kz" event={"ID":"487029c0-a6d2-4f9a-a9d1-d819b22d1279","Type":"ContainerStarted","Data":"5356e681b840685bd523ddc01b8011416f471da3f9b0e9a89c0fc86335d16d19"} Dec 01 20:18:23 crc kubenswrapper[4852]: I1201 20:18:23.323936 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bs8kz" event={"ID":"487029c0-a6d2-4f9a-a9d1-d819b22d1279","Type":"ContainerStarted","Data":"dd48a3dc1cea0d25576eaac4a355bd9849f1fa73e1554e461ffcdf32771eb0ad"} Dec 01 20:18:23 crc kubenswrapper[4852]: I1201 20:18:23.345945 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-bs8kz" podStartSLOduration=5.345918156 podStartE2EDuration="5.345918156s" podCreationTimestamp="2025-12-01 20:18:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:18:23.340913641 +0000 UTC m=+823.267995058" watchObservedRunningTime="2025-12-01 20:18:23.345918156 +0000 UTC m=+823.272999573" Dec 01 20:18:24 crc kubenswrapper[4852]: I1201 20:18:24.337624 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-bs8kz" Dec 01 20:18:28 crc kubenswrapper[4852]: I1201 20:18:28.389244 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" event={"ID":"7b0c5529-861a-4fa1-82f7-72c2463171ee","Type":"ContainerStarted","Data":"790c0f5dd7e8539e07e0ce66f26a9501ea9c955ff8d59fad78a81c153d747026"} Dec 01 20:18:28 crc kubenswrapper[4852]: I1201 20:18:28.390068 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" Dec 01 20:18:28 crc kubenswrapper[4852]: I1201 20:18:28.391358 4852 generic.go:334] "Generic (PLEG): container finished" podID="8f00dfd9-29e9-420b-925f-8e875da31ee8" containerID="591747db4f7015fac607bbb3655f2af0b6f9c77ec6869bda0a68ac3f8eaccf58" exitCode=0 Dec 01 20:18:28 crc kubenswrapper[4852]: I1201 20:18:28.391432 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dhxv4" event={"ID":"8f00dfd9-29e9-420b-925f-8e875da31ee8","Type":"ContainerDied","Data":"591747db4f7015fac607bbb3655f2af0b6f9c77ec6869bda0a68ac3f8eaccf58"} Dec 01 20:18:28 crc kubenswrapper[4852]: I1201 20:18:28.415228 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" podStartSLOduration=1.741082289 podStartE2EDuration="10.415207909s" podCreationTimestamp="2025-12-01 20:18:18 +0000 UTC" firstStartedPulling="2025-12-01 20:18:18.88988172 +0000 UTC m=+818.816963137" lastFinishedPulling="2025-12-01 20:18:27.56400735 +0000 UTC m=+827.491088757" observedRunningTime="2025-12-01 20:18:28.412306549 +0000 UTC m=+828.339387976" watchObservedRunningTime="2025-12-01 20:18:28.415207909 +0000 UTC m=+828.342289336" Dec 01 20:18:29 crc kubenswrapper[4852]: I1201 20:18:29.401907 4852 generic.go:334] "Generic (PLEG): container finished" 
podID="8f00dfd9-29e9-420b-925f-8e875da31ee8" containerID="aa8f76c1b86097051f6c59f1b11488aa61a4eb64196ca22afb3059d2bc97e1a8" exitCode=0 Dec 01 20:18:29 crc kubenswrapper[4852]: I1201 20:18:29.402025 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dhxv4" event={"ID":"8f00dfd9-29e9-420b-925f-8e875da31ee8","Type":"ContainerDied","Data":"aa8f76c1b86097051f6c59f1b11488aa61a4eb64196ca22afb3059d2bc97e1a8"} Dec 01 20:18:30 crc kubenswrapper[4852]: I1201 20:18:30.411837 4852 generic.go:334] "Generic (PLEG): container finished" podID="8f00dfd9-29e9-420b-925f-8e875da31ee8" containerID="1549a57cf397d8a905b14f55c39f5823376612d0d3db3204f72a3c5cfdec5604" exitCode=0 Dec 01 20:18:30 crc kubenswrapper[4852]: I1201 20:18:30.411924 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dhxv4" event={"ID":"8f00dfd9-29e9-420b-925f-8e875da31ee8","Type":"ContainerDied","Data":"1549a57cf397d8a905b14f55c39f5823376612d0d3db3204f72a3c5cfdec5604"} Dec 01 20:18:31 crc kubenswrapper[4852]: I1201 20:18:31.434852 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dhxv4" event={"ID":"8f00dfd9-29e9-420b-925f-8e875da31ee8","Type":"ContainerStarted","Data":"0e96169d7def7f4a80d8063af65fb3c16e6f2f7f30ffd9bafcbb352d809efd63"} Dec 01 20:18:31 crc kubenswrapper[4852]: I1201 20:18:31.435358 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dhxv4" event={"ID":"8f00dfd9-29e9-420b-925f-8e875da31ee8","Type":"ContainerStarted","Data":"0fbbf8a6c8886811558ae66eca7b20dd18eb77ba9b871a34247474176cd47e0f"} Dec 01 20:18:31 crc kubenswrapper[4852]: I1201 20:18:31.435376 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dhxv4" event={"ID":"8f00dfd9-29e9-420b-925f-8e875da31ee8","Type":"ContainerStarted","Data":"d7154f91b73d658bd4b69b82ffcfa3515525baaf51fbc2ecf6129d84716d26ec"} Dec 01 20:18:31 crc kubenswrapper[4852]: I1201 20:18:31.435408 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dhxv4" event={"ID":"8f00dfd9-29e9-420b-925f-8e875da31ee8","Type":"ContainerStarted","Data":"055a6533c429cf0b5704ff94f693f7cdf34c21cf08684117b135f773fdd87dd5"} Dec 01 20:18:31 crc kubenswrapper[4852]: I1201 20:18:31.435421 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dhxv4" event={"ID":"8f00dfd9-29e9-420b-925f-8e875da31ee8","Type":"ContainerStarted","Data":"90ea992efad706550e1ac9a0c2c9a71421509041d89c20afb0584184d8911e9b"} Dec 01 20:18:32 crc kubenswrapper[4852]: I1201 20:18:32.257084 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-bs8kz" Dec 01 20:18:32 crc kubenswrapper[4852]: I1201 20:18:32.453104 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dhxv4" event={"ID":"8f00dfd9-29e9-420b-925f-8e875da31ee8","Type":"ContainerStarted","Data":"97e218090ffbb3a49f6c91634ac8ca55548beaf4ac2e7d47577450bd60f7680a"} Dec 01 20:18:32 crc kubenswrapper[4852]: I1201 20:18:32.453662 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:32 crc kubenswrapper[4852]: I1201 20:18:32.497839 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-dhxv4" podStartSLOduration=7.098410386 podStartE2EDuration="14.497808603s" podCreationTimestamp="2025-12-01 20:18:18 +0000 UTC" firstStartedPulling="2025-12-01 20:18:20.182867342 +0000 UTC m=+820.109948759" 
lastFinishedPulling="2025-12-01 20:18:27.582265519 +0000 UTC m=+827.509346976" observedRunningTime="2025-12-01 20:18:32.489519295 +0000 UTC m=+832.416600722" watchObservedRunningTime="2025-12-01 20:18:32.497808603 +0000 UTC m=+832.424890030" Dec 01 20:18:33 crc kubenswrapper[4852]: I1201 20:18:33.577509 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:33 crc kubenswrapper[4852]: I1201 20:18:33.646739 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:35 crc kubenswrapper[4852]: I1201 20:18:35.451936 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jc6hj"] Dec 01 20:18:35 crc kubenswrapper[4852]: I1201 20:18:35.454700 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jc6hj" Dec 01 20:18:35 crc kubenswrapper[4852]: I1201 20:18:35.459222 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-fgrqp" Dec 01 20:18:35 crc kubenswrapper[4852]: I1201 20:18:35.459436 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 01 20:18:35 crc kubenswrapper[4852]: I1201 20:18:35.459618 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 01 20:18:35 crc kubenswrapper[4852]: I1201 20:18:35.471143 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jc6hj"] Dec 01 20:18:35 crc kubenswrapper[4852]: I1201 20:18:35.496647 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sktq\" (UniqueName: \"kubernetes.io/projected/30429633-f9e8-4693-820a-db458b3ebaa8-kube-api-access-8sktq\") pod \"openstack-operator-index-jc6hj\" (UID: \"30429633-f9e8-4693-820a-db458b3ebaa8\") " pod="openstack-operators/openstack-operator-index-jc6hj" Dec 01 20:18:35 crc kubenswrapper[4852]: I1201 20:18:35.597918 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sktq\" (UniqueName: \"kubernetes.io/projected/30429633-f9e8-4693-820a-db458b3ebaa8-kube-api-access-8sktq\") pod \"openstack-operator-index-jc6hj\" (UID: \"30429633-f9e8-4693-820a-db458b3ebaa8\") " pod="openstack-operators/openstack-operator-index-jc6hj" Dec 01 20:18:35 crc kubenswrapper[4852]: I1201 20:18:35.621015 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sktq\" (UniqueName: \"kubernetes.io/projected/30429633-f9e8-4693-820a-db458b3ebaa8-kube-api-access-8sktq\") pod \"openstack-operator-index-jc6hj\" (UID: \"30429633-f9e8-4693-820a-db458b3ebaa8\") " pod="openstack-operators/openstack-operator-index-jc6hj" Dec 01 20:18:35 crc kubenswrapper[4852]: I1201 20:18:35.779282 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jc6hj" Dec 01 20:18:36 crc kubenswrapper[4852]: I1201 20:18:36.063744 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jc6hj"] Dec 01 20:18:36 crc kubenswrapper[4852]: W1201 20:18:36.073338 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30429633_f9e8_4693_820a_db458b3ebaa8.slice/crio-9cf7d1171ec779dd53c3bad70a099d72883f9ba0a03299b9bedc4b9730fb7408 WatchSource:0}: Error finding container 9cf7d1171ec779dd53c3bad70a099d72883f9ba0a03299b9bedc4b9730fb7408: Status 404 returned error can't find the container with id 9cf7d1171ec779dd53c3bad70a099d72883f9ba0a03299b9bedc4b9730fb7408 Dec 01 20:18:36 crc kubenswrapper[4852]: I1201 20:18:36.488054 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jc6hj" event={"ID":"30429633-f9e8-4693-820a-db458b3ebaa8","Type":"ContainerStarted","Data":"9cf7d1171ec779dd53c3bad70a099d72883f9ba0a03299b9bedc4b9730fb7408"} Dec 01 20:18:38 crc kubenswrapper[4852]: I1201 20:18:38.502262 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jc6hj" event={"ID":"30429633-f9e8-4693-820a-db458b3ebaa8","Type":"ContainerStarted","Data":"71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb"} Dec 01 20:18:38 crc kubenswrapper[4852]: I1201 20:18:38.531247 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jc6hj" podStartSLOduration=2.141165538 podStartE2EDuration="3.531217184s" podCreationTimestamp="2025-12-01 20:18:35 +0000 UTC" firstStartedPulling="2025-12-01 20:18:36.076405535 +0000 UTC m=+836.003486952" lastFinishedPulling="2025-12-01 20:18:37.466457181 +0000 UTC m=+837.393538598" observedRunningTime="2025-12-01 20:18:38.523666949 +0000 UTC m=+838.450748406" watchObservedRunningTime="2025-12-01 20:18:38.531217184 +0000 UTC m=+838.458298621" Dec 01 20:18:38 crc kubenswrapper[4852]: I1201 20:18:38.643877 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vwpnc" Dec 01 20:18:38 crc kubenswrapper[4852]: I1201 20:18:38.676995 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-62hbm" Dec 01 20:18:38 crc kubenswrapper[4852]: I1201 20:18:38.827870 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jc6hj"] Dec 01 20:18:39 crc kubenswrapper[4852]: I1201 20:18:39.428024 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-kc7p9"] Dec 01 20:18:39 crc kubenswrapper[4852]: I1201 20:18:39.428868 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-kc7p9" Dec 01 20:18:39 crc kubenswrapper[4852]: I1201 20:18:39.443998 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-kc7p9"] Dec 01 20:18:39 crc kubenswrapper[4852]: I1201 20:18:39.460925 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j88h\" (UniqueName: \"kubernetes.io/projected/3e4739d8-ba85-4187-9f0b-b51d0c81b8f5-kube-api-access-2j88h\") pod \"openstack-operator-index-kc7p9\" (UID: \"3e4739d8-ba85-4187-9f0b-b51d0c81b8f5\") " pod="openstack-operators/openstack-operator-index-kc7p9" Dec 01 20:18:39 crc kubenswrapper[4852]: I1201 20:18:39.562959 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j88h\" (UniqueName: \"kubernetes.io/projected/3e4739d8-ba85-4187-9f0b-b51d0c81b8f5-kube-api-access-2j88h\") pod \"openstack-operator-index-kc7p9\" (UID: \"3e4739d8-ba85-4187-9f0b-b51d0c81b8f5\") " pod="openstack-operators/openstack-operator-index-kc7p9" Dec 01 20:18:39 crc kubenswrapper[4852]: I1201 20:18:39.588447 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j88h\" (UniqueName: \"kubernetes.io/projected/3e4739d8-ba85-4187-9f0b-b51d0c81b8f5-kube-api-access-2j88h\") pod \"openstack-operator-index-kc7p9\" (UID: \"3e4739d8-ba85-4187-9f0b-b51d0c81b8f5\") " pod="openstack-operators/openstack-operator-index-kc7p9" Dec 01 20:18:39 crc kubenswrapper[4852]: I1201 20:18:39.747250 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-kc7p9" Dec 01 20:18:40 crc kubenswrapper[4852]: I1201 20:18:40.412751 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-kc7p9"] Dec 01 20:18:40 crc kubenswrapper[4852]: I1201 20:18:40.514536 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-kc7p9" event={"ID":"3e4739d8-ba85-4187-9f0b-b51d0c81b8f5","Type":"ContainerStarted","Data":"f8a0fe1e0a98a869aae46ca6cd76273554797d32b687e039225880c2397958bc"} Dec 01 20:18:40 crc kubenswrapper[4852]: I1201 20:18:40.514744 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-jc6hj" podUID="30429633-f9e8-4693-820a-db458b3ebaa8" containerName="registry-server" containerID="cri-o://71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb" gracePeriod=2 Dec 01 20:18:40 crc kubenswrapper[4852]: I1201 20:18:40.917680 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jc6hj" Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.029605 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sktq\" (UniqueName: \"kubernetes.io/projected/30429633-f9e8-4693-820a-db458b3ebaa8-kube-api-access-8sktq\") pod \"30429633-f9e8-4693-820a-db458b3ebaa8\" (UID: \"30429633-f9e8-4693-820a-db458b3ebaa8\") " Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.038090 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30429633-f9e8-4693-820a-db458b3ebaa8-kube-api-access-8sktq" (OuterVolumeSpecName: "kube-api-access-8sktq") pod "30429633-f9e8-4693-820a-db458b3ebaa8" (UID: "30429633-f9e8-4693-820a-db458b3ebaa8"). 
InnerVolumeSpecName "kube-api-access-8sktq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.131574 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sktq\" (UniqueName: \"kubernetes.io/projected/30429633-f9e8-4693-820a-db458b3ebaa8-kube-api-access-8sktq\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.523863 4852 generic.go:334] "Generic (PLEG): container finished" podID="30429633-f9e8-4693-820a-db458b3ebaa8" containerID="71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb" exitCode=0 Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.524146 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jc6hj" event={"ID":"30429633-f9e8-4693-820a-db458b3ebaa8","Type":"ContainerDied","Data":"71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb"} Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.524191 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jc6hj" Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.524574 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jc6hj" event={"ID":"30429633-f9e8-4693-820a-db458b3ebaa8","Type":"ContainerDied","Data":"9cf7d1171ec779dd53c3bad70a099d72883f9ba0a03299b9bedc4b9730fb7408"} Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.524641 4852 scope.go:117] "RemoveContainer" containerID="71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb" Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.529488 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-kc7p9" event={"ID":"3e4739d8-ba85-4187-9f0b-b51d0c81b8f5","Type":"ContainerStarted","Data":"cd4fe09c9606e80dd5f646f6debeef5fb4f18dd044d5cb21aab55bbed83e7143"} Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.548992 4852 scope.go:117] "RemoveContainer" containerID="71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb" Dec 01 20:18:41 crc kubenswrapper[4852]: E1201 20:18:41.550529 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb\": container with ID starting with 71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb not found: ID does not exist" containerID="71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb" Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.550573 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb"} err="failed to get container status \"71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb\": rpc error: code = NotFound desc = could not find container \"71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb\": container with ID starting with 71e7113b70cf3499e7768dc0661dc69d965c228457fe02428c6279052b880acb not found: ID does not exist" Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.556503 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-kc7p9" podStartSLOduration=1.599226302 podStartE2EDuration="2.556479985s" podCreationTimestamp="2025-12-01 20:18:39 +0000 UTC" 
firstStartedPulling="2025-12-01 20:18:40.425659525 +0000 UTC m=+840.352740952" lastFinishedPulling="2025-12-01 20:18:41.382913218 +0000 UTC m=+841.309994635" observedRunningTime="2025-12-01 20:18:41.543697587 +0000 UTC m=+841.470779024" watchObservedRunningTime="2025-12-01 20:18:41.556479985 +0000 UTC m=+841.483561402" Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.560522 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jc6hj"] Dec 01 20:18:41 crc kubenswrapper[4852]: I1201 20:18:41.564524 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-jc6hj"] Dec 01 20:18:42 crc kubenswrapper[4852]: I1201 20:18:42.331842 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30429633-f9e8-4693-820a-db458b3ebaa8" path="/var/lib/kubelet/pods/30429633-f9e8-4693-820a-db458b3ebaa8/volumes" Dec 01 20:18:48 crc kubenswrapper[4852]: I1201 20:18:48.583630 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-dhxv4" Dec 01 20:18:49 crc kubenswrapper[4852]: I1201 20:18:49.747938 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-kc7p9" Dec 01 20:18:49 crc kubenswrapper[4852]: I1201 20:18:49.748022 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-kc7p9" Dec 01 20:18:49 crc kubenswrapper[4852]: I1201 20:18:49.790431 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-kc7p9" Dec 01 20:18:50 crc kubenswrapper[4852]: I1201 20:18:50.626177 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-kc7p9" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.095846 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw"] Dec 01 20:18:52 crc kubenswrapper[4852]: E1201 20:18:52.096729 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30429633-f9e8-4693-820a-db458b3ebaa8" containerName="registry-server" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.096755 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="30429633-f9e8-4693-820a-db458b3ebaa8" containerName="registry-server" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.097278 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="30429633-f9e8-4693-820a-db458b3ebaa8" containerName="registry-server" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.100502 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.107246 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-p9xfw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.118333 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw"] Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.200396 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.200505 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrxn5\" (UniqueName: \"kubernetes.io/projected/894e8bba-50f5-4183-87b8-171444b663d3-kube-api-access-qrxn5\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.200617 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.302689 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.302761 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrxn5\" (UniqueName: \"kubernetes.io/projected/894e8bba-50f5-4183-87b8-171444b663d3-kube-api-access-qrxn5\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.302813 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.303520 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.304314 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.331075 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrxn5\" (UniqueName: \"kubernetes.io/projected/894e8bba-50f5-4183-87b8-171444b663d3-kube-api-access-qrxn5\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.436365 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:52 crc kubenswrapper[4852]: I1201 20:18:52.713490 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw"] Dec 01 20:18:53 crc kubenswrapper[4852]: I1201 20:18:53.617685 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" event={"ID":"894e8bba-50f5-4183-87b8-171444b663d3","Type":"ContainerStarted","Data":"c48cad05e07efda8310bfd12b43157c9103260e0a294b35afc134c8cbe3a73bd"} Dec 01 20:18:54 crc kubenswrapper[4852]: I1201 20:18:54.628828 4852 generic.go:334] "Generic (PLEG): container finished" podID="894e8bba-50f5-4183-87b8-171444b663d3" containerID="921c87bbae650e1db20864f6698da58ae2177ba407a25c7f85006d82bdeaa8b2" exitCode=0 Dec 01 20:18:54 crc kubenswrapper[4852]: I1201 20:18:54.629036 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" event={"ID":"894e8bba-50f5-4183-87b8-171444b663d3","Type":"ContainerDied","Data":"921c87bbae650e1db20864f6698da58ae2177ba407a25c7f85006d82bdeaa8b2"} Dec 01 20:18:56 crc kubenswrapper[4852]: I1201 20:18:56.647232 4852 generic.go:334] "Generic (PLEG): container finished" podID="894e8bba-50f5-4183-87b8-171444b663d3" containerID="8a7503cb4d6d36b416a7a7ad99881d4d6a7273e3dd4cc71db408ffd3063db792" exitCode=0 Dec 01 20:18:56 crc kubenswrapper[4852]: I1201 20:18:56.647288 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" event={"ID":"894e8bba-50f5-4183-87b8-171444b663d3","Type":"ContainerDied","Data":"8a7503cb4d6d36b416a7a7ad99881d4d6a7273e3dd4cc71db408ffd3063db792"} Dec 01 20:18:57 crc kubenswrapper[4852]: I1201 20:18:57.659584 4852 generic.go:334] "Generic (PLEG): container finished" podID="894e8bba-50f5-4183-87b8-171444b663d3" containerID="34c329c6b914a284695b24c839d8d5397dd778ce5fd1ab75b702900423754f91" exitCode=0 Dec 01 20:18:57 crc kubenswrapper[4852]: I1201 20:18:57.659680 4852 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" event={"ID":"894e8bba-50f5-4183-87b8-171444b663d3","Type":"ContainerDied","Data":"34c329c6b914a284695b24c839d8d5397dd778ce5fd1ab75b702900423754f91"} Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.004076 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.135610 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-util\") pod \"894e8bba-50f5-4183-87b8-171444b663d3\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.135876 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrxn5\" (UniqueName: \"kubernetes.io/projected/894e8bba-50f5-4183-87b8-171444b663d3-kube-api-access-qrxn5\") pod \"894e8bba-50f5-4183-87b8-171444b663d3\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.135946 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-bundle\") pod \"894e8bba-50f5-4183-87b8-171444b663d3\" (UID: \"894e8bba-50f5-4183-87b8-171444b663d3\") " Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.136703 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-bundle" (OuterVolumeSpecName: "bundle") pod "894e8bba-50f5-4183-87b8-171444b663d3" (UID: "894e8bba-50f5-4183-87b8-171444b663d3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.149248 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-util" (OuterVolumeSpecName: "util") pod "894e8bba-50f5-4183-87b8-171444b663d3" (UID: "894e8bba-50f5-4183-87b8-171444b663d3"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.155969 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/894e8bba-50f5-4183-87b8-171444b663d3-kube-api-access-qrxn5" (OuterVolumeSpecName: "kube-api-access-qrxn5") pod "894e8bba-50f5-4183-87b8-171444b663d3" (UID: "894e8bba-50f5-4183-87b8-171444b663d3"). InnerVolumeSpecName "kube-api-access-qrxn5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.237275 4852 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-util\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.237322 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrxn5\" (UniqueName: \"kubernetes.io/projected/894e8bba-50f5-4183-87b8-171444b663d3-kube-api-access-qrxn5\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.237338 4852 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/894e8bba-50f5-4183-87b8-171444b663d3-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.677144 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" event={"ID":"894e8bba-50f5-4183-87b8-171444b663d3","Type":"ContainerDied","Data":"c48cad05e07efda8310bfd12b43157c9103260e0a294b35afc134c8cbe3a73bd"} Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.677194 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c48cad05e07efda8310bfd12b43157c9103260e0a294b35afc134c8cbe3a73bd" Dec 01 20:18:59 crc kubenswrapper[4852]: I1201 20:18:59.677214 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.328385 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn"] Dec 01 20:19:04 crc kubenswrapper[4852]: E1201 20:19:04.329636 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894e8bba-50f5-4183-87b8-171444b663d3" containerName="pull" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.329654 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="894e8bba-50f5-4183-87b8-171444b663d3" containerName="pull" Dec 01 20:19:04 crc kubenswrapper[4852]: E1201 20:19:04.329695 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894e8bba-50f5-4183-87b8-171444b663d3" containerName="util" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.329707 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="894e8bba-50f5-4183-87b8-171444b663d3" containerName="util" Dec 01 20:19:04 crc kubenswrapper[4852]: E1201 20:19:04.329726 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894e8bba-50f5-4183-87b8-171444b663d3" containerName="extract" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.329735 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="894e8bba-50f5-4183-87b8-171444b663d3" containerName="extract" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.330010 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="894e8bba-50f5-4183-87b8-171444b663d3" containerName="extract" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.330827 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.337179 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-b77fj" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.357955 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn"] Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.413199 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vscv4\" (UniqueName: \"kubernetes.io/projected/7c160373-5106-41ac-8e58-9ae48e82f5b5-kube-api-access-vscv4\") pod \"openstack-operator-controller-operator-6ddddd9d6f-vz4mn\" (UID: \"7c160373-5106-41ac-8e58-9ae48e82f5b5\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.514293 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vscv4\" (UniqueName: \"kubernetes.io/projected/7c160373-5106-41ac-8e58-9ae48e82f5b5-kube-api-access-vscv4\") pod \"openstack-operator-controller-operator-6ddddd9d6f-vz4mn\" (UID: \"7c160373-5106-41ac-8e58-9ae48e82f5b5\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.536374 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vscv4\" (UniqueName: \"kubernetes.io/projected/7c160373-5106-41ac-8e58-9ae48e82f5b5-kube-api-access-vscv4\") pod \"openstack-operator-controller-operator-6ddddd9d6f-vz4mn\" (UID: \"7c160373-5106-41ac-8e58-9ae48e82f5b5\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn" Dec 01 20:19:04 crc kubenswrapper[4852]: I1201 20:19:04.655615 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn" Dec 01 20:19:05 crc kubenswrapper[4852]: I1201 20:19:05.025837 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn"] Dec 01 20:19:05 crc kubenswrapper[4852]: I1201 20:19:05.729956 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn" event={"ID":"7c160373-5106-41ac-8e58-9ae48e82f5b5","Type":"ContainerStarted","Data":"08c31c9e724c37a670884bee842b917778ebee86a2e0302a397b34378023d0d9"} Dec 01 20:19:11 crc kubenswrapper[4852]: I1201 20:19:11.778819 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn" event={"ID":"7c160373-5106-41ac-8e58-9ae48e82f5b5","Type":"ContainerStarted","Data":"78c20d324e79aeb9230d6c69bf625eed26b821858cdb03e123e1d5fa56e59af0"} Dec 01 20:19:11 crc kubenswrapper[4852]: I1201 20:19:11.779706 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn" Dec 01 20:19:11 crc kubenswrapper[4852]: I1201 20:19:11.820514 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn" podStartSLOduration=2.014235419 podStartE2EDuration="7.82048867s" podCreationTimestamp="2025-12-01 20:19:04 +0000 UTC" firstStartedPulling="2025-12-01 20:19:05.035275754 +0000 UTC m=+864.962357171" lastFinishedPulling="2025-12-01 20:19:10.841529005 +0000 UTC m=+870.768610422" observedRunningTime="2025-12-01 20:19:11.81665575 +0000 UTC m=+871.743737167" watchObservedRunningTime="2025-12-01 20:19:11.82048867 +0000 UTC m=+871.747570107" Dec 01 20:19:20 crc kubenswrapper[4852]: I1201 20:19:20.231218 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:19:20 crc kubenswrapper[4852]: I1201 20:19:20.231880 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:19:24 crc kubenswrapper[4852]: I1201 20:19:24.660306 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-vz4mn" Dec 01 20:19:42 crc kubenswrapper[4852]: I1201 20:19:42.913046 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-95kl4"] Dec 01 20:19:42 crc kubenswrapper[4852]: I1201 20:19:42.915995 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:42 crc kubenswrapper[4852]: I1201 20:19:42.935635 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-95kl4"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.098161 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf4cx\" (UniqueName: \"kubernetes.io/projected/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-kube-api-access-jf4cx\") pod \"community-operators-95kl4\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.098302 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-utilities\") pod \"community-operators-95kl4\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.098327 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-catalog-content\") pod \"community-operators-95kl4\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.200096 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-utilities\") pod \"community-operators-95kl4\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.200157 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-catalog-content\") pod \"community-operators-95kl4\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.200238 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf4cx\" (UniqueName: \"kubernetes.io/projected/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-kube-api-access-jf4cx\") pod \"community-operators-95kl4\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.200842 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-catalog-content\") pod \"community-operators-95kl4\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.200848 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-utilities\") pod \"community-operators-95kl4\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.230766 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jf4cx\" (UniqueName: \"kubernetes.io/projected/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-kube-api-access-jf4cx\") pod \"community-operators-95kl4\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.235177 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.698219 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.699758 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.704962 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-5hz2n" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.712645 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.722567 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.723767 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.729501 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-mknqg" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.730384 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.739367 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.741359 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.746589 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-2w84f" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.788906 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.809587 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtnpx\" (UniqueName: \"kubernetes.io/projected/152c7905-249d-4195-afe6-7b02b5d8267d-kube-api-access-dtnpx\") pod \"cinder-operator-controller-manager-859b6ccc6-2khxz\" (UID: \"152c7905-249d-4195-afe6-7b02b5d8267d\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.809646 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9m8k\" (UniqueName: \"kubernetes.io/projected/7d884d8c-acfc-47fe-bee2-f0248f8b0eea-kube-api-access-h9m8k\") pod \"barbican-operator-controller-manager-7d9dfd778-bd4jr\" (UID: \"7d884d8c-acfc-47fe-bee2-f0248f8b0eea\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.832835 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.840219 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.850924 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-dbkwm" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.865527 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.908306 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.924245 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp6dv\" (UniqueName: \"kubernetes.io/projected/714caded-89c7-44a3-a832-2fbaa0e00ac2-kube-api-access-mp6dv\") pod \"designate-operator-controller-manager-78b4bc895b-gtsjg\" (UID: \"714caded-89c7-44a3-a832-2fbaa0e00ac2\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.924784 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtnpx\" (UniqueName: \"kubernetes.io/projected/152c7905-249d-4195-afe6-7b02b5d8267d-kube-api-access-dtnpx\") pod \"cinder-operator-controller-manager-859b6ccc6-2khxz\" (UID: \"152c7905-249d-4195-afe6-7b02b5d8267d\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.924830 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9m8k\" (UniqueName: \"kubernetes.io/projected/7d884d8c-acfc-47fe-bee2-f0248f8b0eea-kube-api-access-h9m8k\") pod \"barbican-operator-controller-manager-7d9dfd778-bd4jr\" (UID: \"7d884d8c-acfc-47fe-bee2-f0248f8b0eea\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.938315 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-4lz2p" Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.980924 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-95kl4"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.983752 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt"] Dec 01 20:19:43 crc kubenswrapper[4852]: I1201 20:19:43.984421 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtnpx\" (UniqueName: \"kubernetes.io/projected/152c7905-249d-4195-afe6-7b02b5d8267d-kube-api-access-dtnpx\") pod \"cinder-operator-controller-manager-859b6ccc6-2khxz\" (UID: \"152c7905-249d-4195-afe6-7b02b5d8267d\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.009757 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.010293 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9m8k\" (UniqueName: \"kubernetes.io/projected/7d884d8c-acfc-47fe-bee2-f0248f8b0eea-kube-api-access-h9m8k\") pod \"barbican-operator-controller-manager-7d9dfd778-bd4jr\" (UID: \"7d884d8c-acfc-47fe-bee2-f0248f8b0eea\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.018384 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.019543 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.022572 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.030229 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.031092 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdns8\" (UniqueName: \"kubernetes.io/projected/efb3ff96-731f-4a01-8bed-636717f36fb4-kube-api-access-qdns8\") pod \"glance-operator-controller-manager-668d9c48b9-ccgjt\" (UID: \"efb3ff96-731f-4a01-8bed-636717f36fb4\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.037140 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4928\" (UniqueName: \"kubernetes.io/projected/b9f7343b-9bba-43e0-bb25-80a5f3fb139f-kube-api-access-q4928\") pod \"heat-operator-controller-manager-5f64f6f8bb-z7gzm\" (UID: \"b9f7343b-9bba-43e0-bb25-80a5f3fb139f\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.038506 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp6dv\" (UniqueName: \"kubernetes.io/projected/714caded-89c7-44a3-a832-2fbaa0e00ac2-kube-api-access-mp6dv\") pod \"designate-operator-controller-manager-78b4bc895b-gtsjg\" (UID: \"714caded-89c7-44a3-a832-2fbaa0e00ac2\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.032885 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.049194 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-fx48l" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.041850 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.050363 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.055832 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.065522 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.065739 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-nqgx8" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.066520 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-95kl4" event={"ID":"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e","Type":"ContainerStarted","Data":"94946c691344f284229023d9d1ceb23d9bb74e4fe3aa80bf2c254838fa86e181"} Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.081391 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp6dv\" (UniqueName: \"kubernetes.io/projected/714caded-89c7-44a3-a832-2fbaa0e00ac2-kube-api-access-mp6dv\") pod \"designate-operator-controller-manager-78b4bc895b-gtsjg\" (UID: \"714caded-89c7-44a3-a832-2fbaa0e00ac2\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.122477 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.129373 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.140035 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wvhc\" (UniqueName: \"kubernetes.io/projected/d2869063-cc44-4cd4-b1f6-5b33a5250e77-kube-api-access-8wvhc\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.140100 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.140134 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4928\" (UniqueName: \"kubernetes.io/projected/b9f7343b-9bba-43e0-bb25-80a5f3fb139f-kube-api-access-q4928\") pod \"heat-operator-controller-manager-5f64f6f8bb-z7gzm\" (UID: \"b9f7343b-9bba-43e0-bb25-80a5f3fb139f\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.140173 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdns8\" (UniqueName: \"kubernetes.io/projected/efb3ff96-731f-4a01-8bed-636717f36fb4-kube-api-access-qdns8\") pod \"glance-operator-controller-manager-668d9c48b9-ccgjt\" (UID: \"efb3ff96-731f-4a01-8bed-636717f36fb4\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.140237 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgr29\" (UniqueName: \"kubernetes.io/projected/fa3d71fd-60b1-488c-9ae0-abb212b8d0a4-kube-api-access-hgr29\") pod \"horizon-operator-controller-manager-68c6d99b8f-q7fhl\" (UID: \"fa3d71fd-60b1-488c-9ae0-abb212b8d0a4\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.148647 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.161075 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.173963 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-xv94s" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.174593 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-6xg2w" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.201752 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.213378 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.218842 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdns8\" (UniqueName: \"kubernetes.io/projected/efb3ff96-731f-4a01-8bed-636717f36fb4-kube-api-access-qdns8\") pod \"glance-operator-controller-manager-668d9c48b9-ccgjt\" (UID: \"efb3ff96-731f-4a01-8bed-636717f36fb4\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.230581 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.231987 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.233250 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4928\" (UniqueName: \"kubernetes.io/projected/b9f7343b-9bba-43e0-bb25-80a5f3fb139f-kube-api-access-q4928\") pod \"heat-operator-controller-manager-5f64f6f8bb-z7gzm\" (UID: \"b9f7343b-9bba-43e0-bb25-80a5f3fb139f\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.234842 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-2dvk4" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.244449 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr6bt\" (UniqueName: \"kubernetes.io/projected/3aa88cab-a21d-40d4-b278-8c006ce138ff-kube-api-access-fr6bt\") pod \"keystone-operator-controller-manager-546d4bdf48-nh9t8\" (UID: \"3aa88cab-a21d-40d4-b278-8c006ce138ff\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.244558 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgr29\" (UniqueName: \"kubernetes.io/projected/fa3d71fd-60b1-488c-9ae0-abb212b8d0a4-kube-api-access-hgr29\") pod \"horizon-operator-controller-manager-68c6d99b8f-q7fhl\" (UID: \"fa3d71fd-60b1-488c-9ae0-abb212b8d0a4\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.244606 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wvhc\" (UniqueName: 
\"kubernetes.io/projected/d2869063-cc44-4cd4-b1f6-5b33a5250e77-kube-api-access-8wvhc\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.244628 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.244692 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szh74\" (UniqueName: \"kubernetes.io/projected/b7818ab0-4a52-48fe-a0c8-88d162745762-kube-api-access-szh74\") pod \"ironic-operator-controller-manager-6c548fd776-4rphb\" (UID: \"b7818ab0-4a52-48fe-a0c8-88d162745762\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb" Dec 01 20:19:44 crc kubenswrapper[4852]: E1201 20:19:44.245363 4852 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 20:19:44 crc kubenswrapper[4852]: E1201 20:19:44.245430 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert podName:d2869063-cc44-4cd4-b1f6-5b33a5250e77 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:44.745407057 +0000 UTC m=+904.672488474 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert") pod "infra-operator-controller-manager-57548d458d-7w8xr" (UID: "d2869063-cc44-4cd4-b1f6-5b33a5250e77") : secret "infra-operator-webhook-server-cert" not found Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.287924 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.298493 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wvhc\" (UniqueName: \"kubernetes.io/projected/d2869063-cc44-4cd4-b1f6-5b33a5250e77-kube-api-access-8wvhc\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.313206 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.314590 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.325941 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-8zcpt"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.326160 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgr29\" (UniqueName: \"kubernetes.io/projected/fa3d71fd-60b1-488c-9ae0-abb212b8d0a4-kube-api-access-hgr29\") pod \"horizon-operator-controller-manager-68c6d99b8f-q7fhl\" (UID: \"fa3d71fd-60b1-488c-9ae0-abb212b8d0a4\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.326499 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.346187 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlrzz\" (UniqueName: \"kubernetes.io/projected/4908b6e5-acd8-4754-877f-18a3b8897aa5-kube-api-access-rlrzz\") pod \"manila-operator-controller-manager-6546668bfd-lz6m7\" (UID: \"4908b6e5-acd8-4754-877f-18a3b8897aa5\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.346249 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szh74\" (UniqueName: \"kubernetes.io/projected/b7818ab0-4a52-48fe-a0c8-88d162745762-kube-api-access-szh74\") pod \"ironic-operator-controller-manager-6c548fd776-4rphb\" (UID: \"b7818ab0-4a52-48fe-a0c8-88d162745762\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.346283 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr6bt\" (UniqueName: \"kubernetes.io/projected/3aa88cab-a21d-40d4-b278-8c006ce138ff-kube-api-access-fr6bt\") pod \"keystone-operator-controller-manager-546d4bdf48-nh9t8\" (UID: \"3aa88cab-a21d-40d4-b278-8c006ce138ff\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.361898 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.365909 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.371815 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.371853 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.372922 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.373801 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.374055 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.376715 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-lw9gd"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.385403 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-fjlvg"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.401523 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-2phng"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.403074 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.406817 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-9xwc9"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.406868 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szh74\" (UniqueName: \"kubernetes.io/projected/b7818ab0-4a52-48fe-a0c8-88d162745762-kube-api-access-szh74\") pod \"ironic-operator-controller-manager-6c548fd776-4rphb\" (UID: \"b7818ab0-4a52-48fe-a0c8-88d162745762\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.414015 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr6bt\" (UniqueName: \"kubernetes.io/projected/3aa88cab-a21d-40d4-b278-8c006ce138ff-kube-api-access-fr6bt\") pod \"keystone-operator-controller-manager-546d4bdf48-nh9t8\" (UID: \"3aa88cab-a21d-40d4-b278-8c006ce138ff\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.414089 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.433428 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.452239 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q45d\" (UniqueName: \"kubernetes.io/projected/2842a3ca-0708-4395-babd-b9dbdc1509d8-kube-api-access-6q45d\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-tdw8l\" (UID: \"2842a3ca-0708-4395-babd-b9dbdc1509d8\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.452322 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m6gs\" (UniqueName: \"kubernetes.io/projected/6ded5e30-894b-4718-b10e-6cdcf29ea854-kube-api-access-2m6gs\") pod \"mariadb-operator-controller-manager-56bbcc9d85-rpswz\" (UID: \"6ded5e30-894b-4718-b10e-6cdcf29ea854\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.452403 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlrzz\" (UniqueName: \"kubernetes.io/projected/4908b6e5-acd8-4754-877f-18a3b8897aa5-kube-api-access-rlrzz\") pod \"manila-operator-controller-manager-6546668bfd-lz6m7\" (UID: \"4908b6e5-acd8-4754-877f-18a3b8897aa5\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.452674 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gq4z4\" (UniqueName: \"kubernetes.io/projected/4ec2c5f6-679b-4f91-ab45-3eae7b12cd13-kube-api-access-gq4z4\") pod \"nova-operator-controller-manager-697bc559fc-5mktq\" (UID: \"4ec2c5f6-679b-4f91-ab45-3eae7b12cd13\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.496644 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-2phng"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.497908 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.511700 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlrzz\" (UniqueName: \"kubernetes.io/projected/4908b6e5-acd8-4754-877f-18a3b8897aa5-kube-api-access-rlrzz\") pod \"manila-operator-controller-manager-6546668bfd-lz6m7\" (UID: \"4908b6e5-acd8-4754-877f-18a3b8897aa5\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.524831 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.560711 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.566547 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.591035 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.605696 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.607708 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-2nbvw"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.612379 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-6d9sv"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.612644 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.612759 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.618946 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q45d\" (UniqueName: \"kubernetes.io/projected/2842a3ca-0708-4395-babd-b9dbdc1509d8-kube-api-access-6q45d\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-tdw8l\" (UID: \"2842a3ca-0708-4395-babd-b9dbdc1509d8\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.619006 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m6gs\" (UniqueName: \"kubernetes.io/projected/6ded5e30-894b-4718-b10e-6cdcf29ea854-kube-api-access-2m6gs\") pod \"mariadb-operator-controller-manager-56bbcc9d85-rpswz\" (UID: \"6ded5e30-894b-4718-b10e-6cdcf29ea854\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.619067 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gq4z4\" (UniqueName: \"kubernetes.io/projected/4ec2c5f6-679b-4f91-ab45-3eae7b12cd13-kube-api-access-gq4z4\") pod \"nova-operator-controller-manager-697bc559fc-5mktq\" (UID: \"4ec2c5f6-679b-4f91-ab45-3eae7b12cd13\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.619131 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9t24\" (UniqueName: \"kubernetes.io/projected/cc6a88c3-1e45-470c-ba3b-c15c83afbcec-kube-api-access-t9t24\") pod \"octavia-operator-controller-manager-998648c74-2phng\" (UID: \"cc6a88c3-1e45-470c-ba3b-c15c83afbcec\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.648882 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g"]
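The kube-api-access-* volumes being mounted throughout the entries above are the kubelet's projected service-account token volumes: one bound token, the cluster CA bundle from the kube-root-ca.crt ConfigMap, and the pod namespace. As a minimal sketch of that same volume shape using the upstream corev1 types (the volume name and token expiry below are illustrative, not taken from this log):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

// kubeAPIAccessVolume builds the shape of a kube-api-access-* projected
// volume: bound service-account token + cluster CA bundle + pod namespace.
func kubeAPIAccessVolume(name string) corev1.Volume {
	expiry := int64(3607) // illustrative bound-token lifetime in seconds
	return corev1.Volume{
		Name: name,
		VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{
					{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
						Path:              "token",
						ExpirationSeconds: &expiry,
					}},
					{ConfigMap: &corev1.ConfigMapProjection{
						LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
						Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}},
					}},
					{DownwardAPI: &corev1.DownwardAPIProjection{
						Items: []corev1.DownwardAPIVolumeFile{{
							Path:     "namespace",
							FieldRef: &corev1.ObjectFieldSelector{FieldPath: "metadata.namespace"},
						}},
					}},
				},
			},
		},
	}
}

func main() {
	v := kubeAPIAccessVolume("kube-api-access-hgr29")
	fmt.Println("built projected volume:", v.Name)
}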
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.650155 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.653167 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gq4z4\" (UniqueName: \"kubernetes.io/projected/4ec2c5f6-679b-4f91-ab45-3eae7b12cd13-kube-api-access-gq4z4\") pod \"nova-operator-controller-manager-697bc559fc-5mktq\" (UID: \"4ec2c5f6-679b-4f91-ab45-3eae7b12cd13\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.654829 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m6gs\" (UniqueName: \"kubernetes.io/projected/6ded5e30-894b-4718-b10e-6cdcf29ea854-kube-api-access-2m6gs\") pod \"mariadb-operator-controller-manager-56bbcc9d85-rpswz\" (UID: \"6ded5e30-894b-4718-b10e-6cdcf29ea854\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.655513 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q45d\" (UniqueName: \"kubernetes.io/projected/2842a3ca-0708-4395-babd-b9dbdc1509d8-kube-api-access-6q45d\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-tdw8l\" (UID: \"2842a3ca-0708-4395-babd-b9dbdc1509d8\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.662249 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-rf2jh"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.681442 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.692532 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.694727 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.708814 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-g5gbj"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.720831 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.720872 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvvpt\" (UniqueName: \"kubernetes.io/projected/8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca-kube-api-access-hvvpt\") pod \"placement-operator-controller-manager-78f8948974-bzkqb\" (UID: \"8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.720915 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pzbx\" (UniqueName: \"kubernetes.io/projected/268f049d-790e-4b1f-958d-0f07ba335215-kube-api-access-5pzbx\") pod \"swift-operator-controller-manager-5f8c65bbfc-blr2g\" (UID: \"268f049d-790e-4b1f-958d-0f07ba335215\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.720936 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-727zd\" (UniqueName: \"kubernetes.io/projected/e3a2d94e-61fb-406b-be5d-4ae5f0c18fda-kube-api-access-727zd\") pod \"ovn-operator-controller-manager-b6456fdb6-ktgl2\" (UID: \"e3a2d94e-61fb-406b-be5d-4ae5f0c18fda\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.720961 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2wj6\" (UniqueName: \"kubernetes.io/projected/c81cbe79-aa85-4707-a3d6-246bf422575b-kube-api-access-z2wj6\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.721004 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9t24\" (UniqueName: \"kubernetes.io/projected/cc6a88c3-1e45-470c-ba3b-c15c83afbcec-kube-api-access-t9t24\") pod \"octavia-operator-controller-manager-998648c74-2phng\" (UID: \"cc6a88c3-1e45-470c-ba3b-c15c83afbcec\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.744891 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.748875 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.749321 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.763332 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.766672 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.779435 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.788200 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.795372 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9t24\" (UniqueName: \"kubernetes.io/projected/cc6a88c3-1e45-470c-ba3b-c15c83afbcec-kube-api-access-t9t24\") pod \"octavia-operator-controller-manager-998648c74-2phng\" (UID: \"cc6a88c3-1e45-470c-ba3b-c15c83afbcec\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.806448 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.809809 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.811335 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.814991 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-7c8ht"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.830996 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-hncmj"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.834376 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.837411 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.838029 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-rjqvx"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.840230 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.848217 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-65qrn"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.851566 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.851620 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzqqv\" (UniqueName: \"kubernetes.io/projected/28cd4665-305c-4855-87c6-f267402d0b05-kube-api-access-hzqqv\") pod \"telemetry-operator-controller-manager-76cc84c6bb-rqqkl\" (UID: \"28cd4665-305c-4855-87c6-f267402d0b05\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.851657 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pzbx\" (UniqueName: \"kubernetes.io/projected/268f049d-790e-4b1f-958d-0f07ba335215-kube-api-access-5pzbx\") pod \"swift-operator-controller-manager-5f8c65bbfc-blr2g\" (UID: \"268f049d-790e-4b1f-958d-0f07ba335215\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.851680 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-727zd\" (UniqueName: \"kubernetes.io/projected/e3a2d94e-61fb-406b-be5d-4ae5f0c18fda-kube-api-access-727zd\") pod \"ovn-operator-controller-manager-b6456fdb6-ktgl2\" (UID: \"e3a2d94e-61fb-406b-be5d-4ae5f0c18fda\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.851737 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2wj6\" (UniqueName: \"kubernetes.io/projected/c81cbe79-aa85-4707-a3d6-246bf422575b-kube-api-access-z2wj6\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.851822 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.851859 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvvpt\" (UniqueName: \"kubernetes.io/projected/8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca-kube-api-access-hvvpt\") pod \"placement-operator-controller-manager-78f8948974-bzkqb\" (UID: \"8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb"
Dec 01 20:19:44 crc kubenswrapper[4852]: E1201 20:19:44.852346 4852 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 20:19:44 crc kubenswrapper[4852]: E1201 20:19:44.852410 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert podName:d2869063-cc44-4cd4-b1f6-5b33a5250e77 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:45.852389427 +0000 UTC m=+905.779470834 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert") pod "infra-operator-controller-manager-57548d458d-7w8xr" (UID: "d2869063-cc44-4cd4-b1f6-5b33a5250e77") : secret "infra-operator-webhook-server-cert" not found
Dec 01 20:19:44 crc kubenswrapper[4852]: E1201 20:19:44.853867 4852 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 20:19:44 crc kubenswrapper[4852]: E1201 20:19:44.853925 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert podName:c81cbe79-aa85-4707-a3d6-246bf422575b nodeName:}" failed. No retries permitted until 2025-12-01 20:19:45.353915475 +0000 UTC m=+905.280996882 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446t9t22" (UID: "c81cbe79-aa85-4707-a3d6-246bf422575b") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.862084 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl"]
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.884630 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2wj6\" (UniqueName: \"kubernetes.io/projected/c81cbe79-aa85-4707-a3d6-246bf422575b-kube-api-access-z2wj6\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.896128 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pzbx\" (UniqueName: \"kubernetes.io/projected/268f049d-790e-4b1f-958d-0f07ba335215-kube-api-access-5pzbx\") pod \"swift-operator-controller-manager-5f8c65bbfc-blr2g\" (UID: \"268f049d-790e-4b1f-958d-0f07ba335215\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g"
Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.898232 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvvpt\" (UniqueName: \"kubernetes.io/projected/8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca-kube-api-access-hvvpt\") pod \"placement-operator-controller-manager-78f8948974-bzkqb\" (UID: \"8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb"
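The durationBeforeRetry values in the nestedpendingoperations errors above show the kubelet's exponential backoff on failed mount operations: 500ms for a first failure, then 1s, and 2s further down in this log. A self-contained sketch of the same doubling policy using apimachinery's wait helpers; the stubbed mount attempt and step count are illustrative assumptions, not the kubelet's actual code path:

package main

import (
	"errors"
	"fmt"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

var errSecretNotFound = errors.New(`secret "infra-operator-webhook-server-cert" not found`)

// tryMount stands in for MountVolume.SetUp: it fails until the secret
// shows up (here, hard-coded to succeed on the fourth attempt).
func tryMount(attempt int) error {
	if attempt < 4 {
		return errSecretNotFound
	}
	return nil
}

func main() {
	backoff := wait.Backoff{
		Duration: 500 * time.Millisecond, // first durationBeforeRetry
		Factor:   2.0,                    // double after each failure
		Steps:    6,                      // give up after six attempts
	}
	attempt := 0
	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
		attempt++
		if mountErr := tryMount(attempt); mountErr != nil {
			fmt.Printf("attempt %d failed: %v (will retry)\n", attempt, mountErr)
			return false, nil // not done; wait and try again
		}
		fmt.Printf("attempt %d succeeded\n", attempt)
		return true, nil
	})
	if err != nil {
		fmt.Println("gave up:", err)
	}
}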
\"kubernetes.io/projected/e3a2d94e-61fb-406b-be5d-4ae5f0c18fda-kube-api-access-727zd\") pod \"ovn-operator-controller-manager-b6456fdb6-ktgl2\" (UID: \"e3a2d94e-61fb-406b-be5d-4ae5f0c18fda\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.906345 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.909271 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.924266 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-hncmj"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.953719 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wpts\" (UniqueName: \"kubernetes.io/projected/1ae195c5-0850-4ca5-85e4-abc7ac4d79dc-kube-api-access-8wpts\") pod \"watcher-operator-controller-manager-769dc69bc-lslxr\" (UID: \"1ae195c5-0850-4ca5-85e4-abc7ac4d79dc\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.953825 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzqqv\" (UniqueName: \"kubernetes.io/projected/28cd4665-305c-4855-87c6-f267402d0b05-kube-api-access-hzqqv\") pod \"telemetry-operator-controller-manager-76cc84c6bb-rqqkl\" (UID: \"28cd4665-305c-4855-87c6-f267402d0b05\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.953904 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5h5c\" (UniqueName: \"kubernetes.io/projected/7240ba3b-5f4b-4c63-99cf-4fe68d720fb5-kube-api-access-r5h5c\") pod \"test-operator-controller-manager-5854674fcc-hncmj\" (UID: \"7240ba3b-5f4b-4c63-99cf-4fe68d720fb5\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.971248 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzqqv\" (UniqueName: \"kubernetes.io/projected/28cd4665-305c-4855-87c6-f267402d0b05-kube-api-access-hzqqv\") pod \"telemetry-operator-controller-manager-76cc84c6bb-rqqkl\" (UID: \"28cd4665-305c-4855-87c6-f267402d0b05\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.983781 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"] Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.985843 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.988723 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.988802 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-f869p" Dec 01 20:19:44 crc kubenswrapper[4852]: I1201 20:19:44.988895 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.000363 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"] Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.012445 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.023011 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc"] Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.024767 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.032511 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-l4dhv" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.033586 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc"] Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.055158 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.055234 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsdw7\" (UniqueName: \"kubernetes.io/projected/35e98bd5-b71c-4842-9511-52b5c9d8e25a-kube-api-access-lsdw7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-pzvgc\" (UID: \"35e98bd5-b71c-4842-9511-52b5c9d8e25a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.055283 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5h5c\" (UniqueName: \"kubernetes.io/projected/7240ba3b-5f4b-4c63-99cf-4fe68d720fb5-kube-api-access-r5h5c\") pod \"test-operator-controller-manager-5854674fcc-hncmj\" (UID: \"7240ba3b-5f4b-4c63-99cf-4fe68d720fb5\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.055307 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.055371 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wpts\" (UniqueName: \"kubernetes.io/projected/1ae195c5-0850-4ca5-85e4-abc7ac4d79dc-kube-api-access-8wpts\") pod \"watcher-operator-controller-manager-769dc69bc-lslxr\" (UID: \"1ae195c5-0850-4ca5-85e4-abc7ac4d79dc\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.055394 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svtws\" (UniqueName: \"kubernetes.io/projected/980395ee-3c8d-41a7-9663-7bc33fb4cd46-kube-api-access-svtws\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.081219 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-95kl4" event={"ID":"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e","Type":"ContainerDied","Data":"f146db9e5cd23e597c7a93f9b07f36ab91ee8ed15ba734cbfffcffb893541283"} Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.081089 4852 generic.go:334] "Generic (PLEG): container finished" podID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerID="f146db9e5cd23e597c7a93f9b07f36ab91ee8ed15ba734cbfffcffb893541283" exitCode=0 Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.082109 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wpts\" (UniqueName: \"kubernetes.io/projected/1ae195c5-0850-4ca5-85e4-abc7ac4d79dc-kube-api-access-8wpts\") pod \"watcher-operator-controller-manager-769dc69bc-lslxr\" (UID: \"1ae195c5-0850-4ca5-85e4-abc7ac4d79dc\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.086812 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5h5c\" (UniqueName: \"kubernetes.io/projected/7240ba3b-5f4b-4c63-99cf-4fe68d720fb5-kube-api-access-r5h5c\") pod \"test-operator-controller-manager-5854674fcc-hncmj\" (UID: \"7240ba3b-5f4b-4c63-99cf-4fe68d720fb5\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.090161 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz"] Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.126809 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb" Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.153439 4852 util.go:30] "No sandbox for pod can be found. 
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.153439 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl"
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.157400 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svtws\" (UniqueName: \"kubernetes.io/projected/980395ee-3c8d-41a7-9663-7bc33fb4cd46-kube-api-access-svtws\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.157499 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.157536 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsdw7\" (UniqueName: \"kubernetes.io/projected/35e98bd5-b71c-4842-9511-52b5c9d8e25a-kube-api-access-lsdw7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-pzvgc\" (UID: \"35e98bd5-b71c-4842-9511-52b5c9d8e25a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc"
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.157573 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.157924 4852 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.158046 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:45.658012829 +0000 UTC m=+905.585094246 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "metrics-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.158646 4852 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.158736 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:45.658708411 +0000 UTC m=+905.585790008 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "webhook-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: W1201 20:19:45.166384 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod152c7905_249d_4195_afe6_7b02b5d8267d.slice/crio-837459887b986ea1a07c3641bbe5a0bbd6cfb9e0bc4f71811b4890345ebde90e WatchSource:0}: Error finding container 837459887b986ea1a07c3641bbe5a0bbd6cfb9e0bc4f71811b4890345ebde90e: Status 404 returned error can't find the container with id 837459887b986ea1a07c3641bbe5a0bbd6cfb9e0bc4f71811b4890345ebde90e
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.179693 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsdw7\" (UniqueName: \"kubernetes.io/projected/35e98bd5-b71c-4842-9511-52b5c9d8e25a-kube-api-access-lsdw7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-pzvgc\" (UID: \"35e98bd5-b71c-4842-9511-52b5c9d8e25a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc"
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.208307 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svtws\" (UniqueName: \"kubernetes.io/projected/980395ee-3c8d-41a7-9663-7bc33fb4cd46-kube-api-access-svtws\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.222844 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj"
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.238690 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr"
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.345516 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr"]
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.350845 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl"]
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.355240 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm"]
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.367513 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.370135 4852 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.370273 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert podName:c81cbe79-aa85-4707-a3d6-246bf422575b nodeName:}" failed. No retries permitted until 2025-12-01 20:19:46.37025345 +0000 UTC m=+906.297334867 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446t9t22" (UID: "c81cbe79-aa85-4707-a3d6-246bf422575b") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.407138 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc"
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.480572 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt"]
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.515903 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg"]
Dec 01 20:19:45 crc kubenswrapper[4852]: W1201 20:19:45.552287 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podefb3ff96_731f_4a01_8bed_636717f36fb4.slice/crio-f841d7f2fede00288f58c24265612bd238fe3da9d16375945cdf1998ab26fc63 WatchSource:0}: Error finding container f841d7f2fede00288f58c24265612bd238fe3da9d16375945cdf1998ab26fc63: Status 404 returned error can't find the container with id f841d7f2fede00288f58c24265612bd238fe3da9d16375945cdf1998ab26fc63
Dec 01 20:19:45 crc kubenswrapper[4852]: W1201 20:19:45.553672 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod714caded_89c7_44a3_a832_2fbaa0e00ac2.slice/crio-1870e48b5fd346d0711a5b5ba58564c42ca398707531f54d56c65fdd46b449ee WatchSource:0}: Error finding container 1870e48b5fd346d0711a5b5ba58564c42ca398707531f54d56c65fdd46b449ee: Status 404 returned error can't find the container with id 1870e48b5fd346d0711a5b5ba58564c42ca398707531f54d56c65fdd46b449ee
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.678526 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.678704 4852 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.678774 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
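Every one of these MountVolume.SetUp failures reduces to the same condition: the secret volume plugin asks the API for a Secret (metrics-server-cert, webhook-server-cert, the *-webhook-server-cert pair earlier) that does not exist in openstack-operators yet, and the mount keeps retrying until whatever controller is responsible publishes it. The same existence check, reproduced with client-go as a small diagnostic (in-cluster config is an assumption; the namespace and secret names come from the log):

package main

import (
	"context"
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	for _, name := range []string{"metrics-server-cert", "webhook-server-cert"} {
		_, err := client.CoreV1().Secrets("openstack-operators").
			Get(context.TODO(), name, metav1.GetOptions{})
		switch {
		case apierrors.IsNotFound(err):
			// Matches the kubelet's: Couldn't get secret ...: secret "<name>" not found
			fmt.Printf("secret %q not found (MountVolume.SetUp will keep retrying)\n", name)
		case err != nil:
			fmt.Printf("lookup failed for %q: %v\n", name, err)
		default:
			fmt.Printf("secret %q exists; the mount can now succeed\n", name)
		}
	}
}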
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.678933 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:46.678884097 +0000 UTC m=+906.605965514 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "metrics-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.679093 4852 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.679178 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:46.679139745 +0000 UTC m=+906.606221162 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "webhook-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.680228 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq"]
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.686287 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb"]
Dec 01 20:19:45 crc kubenswrapper[4852]: W1201 20:19:45.687668 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4ec2c5f6_679b_4f91_ab45_3eae7b12cd13.slice/crio-49c8db6cf22654364af7fb2d9c70eb438ae33978b12ab0be1d4431b055044f01 WatchSource:0}: Error finding container 49c8db6cf22654364af7fb2d9c70eb438ae33978b12ab0be1d4431b055044f01: Status 404 returned error can't find the container with id 49c8db6cf22654364af7fb2d9c70eb438ae33978b12ab0be1d4431b055044f01
Dec 01 20:19:45 crc kubenswrapper[4852]: W1201 20:19:45.698367 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7818ab0_4a52_48fe_a0c8_88d162745762.slice/crio-204c1f530be02c4eee344dab3bbe6375456c4b8e29b019b9322002741aed4896 WatchSource:0}: Error finding container 204c1f530be02c4eee344dab3bbe6375456c4b8e29b019b9322002741aed4896: Status 404 returned error can't find the container with id 204c1f530be02c4eee344dab3bbe6375456c4b8e29b019b9322002741aed4896
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.882046 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr"
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.882662 4852 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: E1201 20:19:45.882823 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert podName:d2869063-cc44-4cd4-b1f6-5b33a5250e77 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:47.882785416 +0000 UTC m=+907.809867003 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert") pod "infra-operator-controller-manager-57548d458d-7w8xr" (UID: "d2869063-cc44-4cd4-b1f6-5b33a5250e77") : secret "infra-operator-webhook-server-cert" not found
Dec 01 20:19:45 crc kubenswrapper[4852]: I1201 20:19:45.993055 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2"]
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.003901 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l"]
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.009185 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz"]
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.029488 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8"]
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.040532 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7"]
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.049405 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-2phng"]
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.050542 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fr6bt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-546d4bdf48-nh9t8_openstack-operators(3aa88cab-a21d-40d4-b278-8c006ce138ff): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.055408 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fr6bt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-546d4bdf48-nh9t8_openstack-operators(3aa88cab-a21d-40d4-b278-8c006ce138ff): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.056617 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" podUID="3aa88cab-a21d-40d4-b278-8c006ce138ff"
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.058275 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl"]
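The ErrImagePull: "pull QPS exceeded" failures here are not registry errors: the kubelet rate-limits its own image pulls (the registryPullQPS and registryBurst kubelet settings, which default to 5 pulls per second with a burst of 10), and starting this many operator pods at once exhausts the budget, so the pulls fail fast and are retried on a later sync. A minimal token-bucket illustration of that client-side limit using golang.org/x/time/rate; the values mirror the documented defaults, and this is a sketch of the mechanism, not the kubelet's implementation:

package main

import (
	"fmt"

	"golang.org/x/time/rate"
)

func main() {
	// 5 pulls/second on average, bursts of up to 10 (kubelet defaults).
	limiter := rate.NewLimiter(rate.Limit(5), 10)

	for i := 1; i <= 15; i++ {
		if limiter.Allow() {
			fmt.Printf("pull %2d: admitted\n", i)
		} else {
			// The kubelet surfaces this as ErrImagePull: "pull QPS exceeded"
			// and lets the pod worker retry the container start later.
			fmt.Printf("pull %2d: pull QPS exceeded\n", i)
		}
	}
}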
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod268f049d_790e_4b1f_958d_0f07ba335215.slice/crio-a396db54960c82ed849968f5c4b909047eb2b1b6acbea0dc56d1703a076dca4f WatchSource:0}: Error finding container a396db54960c82ed849968f5c4b909047eb2b1b6acbea0dc56d1703a076dca4f: Status 404 returned error can't find the container with id a396db54960c82ed849968f5c4b909047eb2b1b6acbea0dc56d1703a076dca4f Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.064494 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5pzbx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-blr2g_openstack-operators(268f049d-790e-4b1f-958d-0f07ba335215): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 20:19:46 crc kubenswrapper[4852]: W1201 20:19:46.066230 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e323f2e_eab0_4e2f_9f8c_a6c892fd89ca.slice/crio-f649628a7081c0f4f1a6b34c259d27dfdcb6f0d17756f9755ec1404efbe10d5e WatchSource:0}: Error finding container f649628a7081c0f4f1a6b34c259d27dfdcb6f0d17756f9755ec1404efbe10d5e: Status 404 returned error can't find the container with id f649628a7081c0f4f1a6b34c259d27dfdcb6f0d17756f9755ec1404efbe10d5e Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 
20:19:46.066829 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5pzbx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-blr2g_openstack-operators(268f049d-790e-4b1f-958d-0f07ba335215): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.067727 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb"] Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.068132 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g" podUID="268f049d-790e-4b1f-958d-0f07ba335215" Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.069444 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
Dec 01 20:19:46 crc kubenswrapper[4852]: W1201 20:19:46.069817 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28cd4665_305c_4855_87c6_f267402d0b05.slice/crio-9e1eec30d58ab13da924e547ccd880316ecb9c075a384bcf9cc885ddab2765a1 WatchSource:0}: Error finding container 9e1eec30d58ab13da924e547ccd880316ecb9c075a384bcf9cc885ddab2765a1: Status 404 returned error can't find the container with id 9e1eec30d58ab13da924e547ccd880316ecb9c075a384bcf9cc885ddab2765a1
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.071515 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hvvpt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-bzkqb_openstack-operators(8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.072677 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hzqqv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-rqqkl_openstack-operators(28cd4665-305c-4855-87c6-f267402d0b05): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.072798 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb" podUID="8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca"
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.074625 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hzqqv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-rqqkl_openstack-operators(28cd4665-305c-4855-87c6-f267402d0b05): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.075902 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" podUID="28cd4665-305c-4855-87c6-f267402d0b05"
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.076420 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g"]
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.092889 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" event={"ID":"7d884d8c-acfc-47fe-bee2-f0248f8b0eea","Type":"ContainerStarted","Data":"75e003f4930300a3411ca309ca0ef3826e7bc40d1e79d180287df36be7855969"}
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.095919 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz" event={"ID":"6ded5e30-894b-4718-b10e-6cdcf29ea854","Type":"ContainerStarted","Data":"cc205ea7dc999b604021a876e3079ff8ea4e7a47af1650fb1b3f6a79b853b0a3"}
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.097991 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g" event={"ID":"268f049d-790e-4b1f-958d-0f07ba335215","Type":"ContainerStarted","Data":"a396db54960c82ed849968f5c4b909047eb2b1b6acbea0dc56d1703a076dca4f"}
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.099959 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb" event={"ID":"8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca","Type":"ContainerStarted","Data":"f649628a7081c0f4f1a6b34c259d27dfdcb6f0d17756f9755ec1404efbe10d5e"}
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.102432 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g" podUID="268f049d-790e-4b1f-958d-0f07ba335215"
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.103166 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb" podUID="8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca"
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.105698 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l" event={"ID":"2842a3ca-0708-4395-babd-b9dbdc1509d8","Type":"ContainerStarted","Data":"31d5a56962266192c0b5eb3e8e3a0b40dac57bf9898c4376798e3aa92852e689"}
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.107653 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" event={"ID":"28cd4665-305c-4855-87c6-f267402d0b05","Type":"ContainerStarted","Data":"9e1eec30d58ab13da924e547ccd880316ecb9c075a384bcf9cc885ddab2765a1"}
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.109897 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl" event={"ID":"fa3d71fd-60b1-488c-9ae0-abb212b8d0a4","Type":"ContainerStarted","Data":"016483de9d391fd681bc37def45720fdfde04da9807aa71b001eee0e50d3c9d7"}
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.110817 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" podUID="28cd4665-305c-4855-87c6-f267402d0b05"
pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" podUID="28cd4665-305c-4855-87c6-f267402d0b05" Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.113334 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7" event={"ID":"4908b6e5-acd8-4754-877f-18a3b8897aa5","Type":"ContainerStarted","Data":"7a3ffeb9a3dc81f3d7558f57f200549bdacdbe8883a50d9f71d40abd2030b149"} Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.118955 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2" event={"ID":"e3a2d94e-61fb-406b-be5d-4ae5f0c18fda","Type":"ContainerStarted","Data":"e09cd6342c9982d46f97eb6d840eda457d63ea024b91ead5d4bb8d2b5a202004"} Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.126305 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt" event={"ID":"efb3ff96-731f-4a01-8bed-636717f36fb4","Type":"ContainerStarted","Data":"f841d7f2fede00288f58c24265612bd238fe3da9d16375945cdf1998ab26fc63"} Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.128163 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm" event={"ID":"b9f7343b-9bba-43e0-bb25-80a5f3fb139f","Type":"ContainerStarted","Data":"9fe76071a515c49b28741ab95af054b27fd6d3c3e1dd8f99dce32e5d4b1b9a0e"} Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.130369 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb" event={"ID":"b7818ab0-4a52-48fe-a0c8-88d162745762","Type":"ContainerStarted","Data":"204c1f530be02c4eee344dab3bbe6375456c4b8e29b019b9322002741aed4896"} Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.132600 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" event={"ID":"152c7905-249d-4195-afe6-7b02b5d8267d","Type":"ContainerStarted","Data":"837459887b986ea1a07c3641bbe5a0bbd6cfb9e0bc4f71811b4890345ebde90e"} Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.135359 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq" event={"ID":"4ec2c5f6-679b-4f91-ab45-3eae7b12cd13","Type":"ContainerStarted","Data":"49c8db6cf22654364af7fb2d9c70eb438ae33978b12ab0be1d4431b055044f01"} Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.136596 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg" event={"ID":"714caded-89c7-44a3-a832-2fbaa0e00ac2","Type":"ContainerStarted","Data":"1870e48b5fd346d0711a5b5ba58564c42ca398707531f54d56c65fdd46b449ee"} Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.138914 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" event={"ID":"3aa88cab-a21d-40d4-b278-8c006ce138ff","Type":"ContainerStarted","Data":"171f6c770039729fe8777a752a9cf4126bbd696761cc3057d08c9dce1ad2ba6a"} Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.142044 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng" 
event={"ID":"cc6a88c3-1e45-470c-ba3b-c15c83afbcec","Type":"ContainerStarted","Data":"16c07d9335cad36eb687214045cb9643955660b3285eddadfe36304827f9a37d"} Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.142753 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" podUID="3aa88cab-a21d-40d4-b278-8c006ce138ff" Dec 01 20:19:46 crc kubenswrapper[4852]: W1201 20:19:46.148824 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ae195c5_0850_4ca5_85e4_abc7ac4d79dc.slice/crio-f7be07efab21665ae2fd901f0a88d0c429046868a91c060c0b011f005fbd51ca WatchSource:0}: Error finding container f7be07efab21665ae2fd901f0a88d0c429046868a91c060c0b011f005fbd51ca: Status 404 returned error can't find the container with id f7be07efab21665ae2fd901f0a88d0c429046868a91c060c0b011f005fbd51ca Dec 01 20:19:46 crc kubenswrapper[4852]: W1201 20:19:46.149089 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35e98bd5_b71c_4842_9511_52b5c9d8e25a.slice/crio-66853f9bf2a2f18983b82b0d90f755f9af77cf0fd8d225601d2d77093c9bff32 WatchSource:0}: Error finding container 66853f9bf2a2f18983b82b0d90f755f9af77cf0fd8d225601d2d77093c9bff32: Status 404 returned error can't find the container with id 66853f9bf2a2f18983b82b0d90f755f9af77cf0fd8d225601d2d77093c9bff32 Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.149932 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc"] Dec 01 20:19:46 crc kubenswrapper[4852]: W1201 20:19:46.154576 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7240ba3b_5f4b_4c63_99cf_4fe68d720fb5.slice/crio-e8033c3fbc763114ad0bb3e92a703f8d318dca16cae31fa470e0986cbf30efe8 WatchSource:0}: Error finding container e8033c3fbc763114ad0bb3e92a703f8d318dca16cae31fa470e0986cbf30efe8: Status 404 returned error can't find the container with id e8033c3fbc763114ad0bb3e92a703f8d318dca16cae31fa470e0986cbf30efe8 Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.155087 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lsdw7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-pzvgc_openstack-operators(35e98bd5-b71c-4842-9511-52b5c9d8e25a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.156248 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc" podUID="35e98bd5-b71c-4842-9511-52b5c9d8e25a" Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.161893 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.162648 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr"]
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.164138 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r5h5c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-hncmj_openstack-operators(7240ba3b-5f4b-4c63-99cf-4fe68d720fb5): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.165373 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" podUID="7240ba3b-5f4b-4c63-99cf-4fe68d720fb5"
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.173030 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-hncmj"]
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.391817 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.392756 4852 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.392899 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert podName:c81cbe79-aa85-4707-a3d6-246bf422575b nodeName:}" failed. No retries permitted until 2025-12-01 20:19:48.392864235 +0000 UTC m=+908.319945832 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446t9t22" (UID: "c81cbe79-aa85-4707-a3d6-246bf422575b") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.699072 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:46 crc kubenswrapper[4852]: I1201 20:19:46.699211 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.699376 4852 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.699400 4852 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.699536 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:48.699516709 +0000 UTC m=+908.626598126 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "webhook-server-cert" not found
Dec 01 20:19:46 crc kubenswrapper[4852]: E1201 20:19:46.699557 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:48.69954689 +0000 UTC m=+908.626628307 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "metrics-server-cert" not found
Dec 01 20:19:47 crc kubenswrapper[4852]: I1201 20:19:47.176396 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" event={"ID":"7240ba3b-5f4b-4c63-99cf-4fe68d720fb5","Type":"ContainerStarted","Data":"e8033c3fbc763114ad0bb3e92a703f8d318dca16cae31fa470e0986cbf30efe8"}
Dec 01 20:19:47 crc kubenswrapper[4852]: I1201 20:19:47.179028 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc" event={"ID":"35e98bd5-b71c-4842-9511-52b5c9d8e25a","Type":"ContainerStarted","Data":"66853f9bf2a2f18983b82b0d90f755f9af77cf0fd8d225601d2d77093c9bff32"}
Dec 01 20:19:47 crc kubenswrapper[4852]: I1201 20:19:47.181056 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr" event={"ID":"1ae195c5-0850-4ca5-85e4-abc7ac4d79dc","Type":"ContainerStarted","Data":"f7be07efab21665ae2fd901f0a88d0c429046868a91c060c0b011f005fbd51ca"}
Dec 01 20:19:47 crc kubenswrapper[4852]: E1201 20:19:47.181845 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc" podUID="35e98bd5-b71c-4842-9511-52b5c9d8e25a"
Dec 01 20:19:47 crc kubenswrapper[4852]: E1201 20:19:47.182868 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" podUID="7240ba3b-5f4b-4c63-99cf-4fe68d720fb5"
Dec 01 20:19:47 crc kubenswrapper[4852]: E1201 20:19:47.183609 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" podUID="3aa88cab-a21d-40d4-b278-8c006ce138ff"
\\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" podUID="3aa88cab-a21d-40d4-b278-8c006ce138ff" Dec 01 20:19:47 crc kubenswrapper[4852]: E1201 20:19:47.184445 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb" podUID="8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca" Dec 01 20:19:47 crc kubenswrapper[4852]: E1201 20:19:47.184925 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g" podUID="268f049d-790e-4b1f-958d-0f07ba335215" Dec 01 20:19:47 crc kubenswrapper[4852]: E1201 20:19:47.185571 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" podUID="28cd4665-305c-4855-87c6-f267402d0b05" Dec 01 20:19:47 crc kubenswrapper[4852]: I1201 20:19:47.929928 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" Dec 01 20:19:47 crc kubenswrapper[4852]: E1201 20:19:47.930165 4852 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 20:19:47 crc kubenswrapper[4852]: E1201 20:19:47.930489 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert podName:d2869063-cc44-4cd4-b1f6-5b33a5250e77 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:51.930428692 +0000 UTC m=+911.857510109 (durationBeforeRetry 4s). 
Dec 01 20:19:48 crc kubenswrapper[4852]: E1201 20:19:48.193066 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc" podUID="35e98bd5-b71c-4842-9511-52b5c9d8e25a"
Dec 01 20:19:48 crc kubenswrapper[4852]: E1201 20:19:48.197362 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" podUID="7240ba3b-5f4b-4c63-99cf-4fe68d720fb5"
Dec 01 20:19:48 crc kubenswrapper[4852]: I1201 20:19:48.440813 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:19:48 crc kubenswrapper[4852]: E1201 20:19:48.442007 4852 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 20:19:48 crc kubenswrapper[4852]: E1201 20:19:48.442086 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert podName:c81cbe79-aa85-4707-a3d6-246bf422575b nodeName:}" failed. No retries permitted until 2025-12-01 20:19:52.44206549 +0000 UTC m=+912.369146907 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446t9t22" (UID: "c81cbe79-aa85-4707-a3d6-246bf422575b") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 20:19:48 crc kubenswrapper[4852]: I1201 20:19:48.745946 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:48 crc kubenswrapper[4852]: I1201 20:19:48.746147 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:48 crc kubenswrapper[4852]: E1201 20:19:48.746330 4852 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 20:19:48 crc kubenswrapper[4852]: E1201 20:19:48.746393 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:52.746374041 +0000 UTC m=+912.673455458 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "webhook-server-cert" not found
Dec 01 20:19:48 crc kubenswrapper[4852]: E1201 20:19:48.746937 4852 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 20:19:48 crc kubenswrapper[4852]: E1201 20:19:48.746981 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:19:52.74697099 +0000 UTC m=+912.674052407 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "metrics-server-cert" not found
Dec 01 20:19:50 crc kubenswrapper[4852]: I1201 20:19:50.230283 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 20:19:50 crc kubenswrapper[4852]: I1201 20:19:50.230738 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 20:19:52 crc kubenswrapper[4852]: I1201 20:19:52.006320 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr"
Dec 01 20:19:52 crc kubenswrapper[4852]: E1201 20:19:52.006638 4852 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 20:19:52 crc kubenswrapper[4852]: E1201 20:19:52.006747 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert podName:d2869063-cc44-4cd4-b1f6-5b33a5250e77 nodeName:}" failed. No retries permitted until 2025-12-01 20:20:00.006716268 +0000 UTC m=+919.933797695 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert") pod "infra-operator-controller-manager-57548d458d-7w8xr" (UID: "d2869063-cc44-4cd4-b1f6-5b33a5250e77") : secret "infra-operator-webhook-server-cert" not found
Dec 01 20:19:52 crc kubenswrapper[4852]: I1201 20:19:52.515232 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:19:52 crc kubenswrapper[4852]: E1201 20:19:52.515433 4852 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 20:19:52 crc kubenswrapper[4852]: E1201 20:19:52.515945 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert podName:c81cbe79-aa85-4707-a3d6-246bf422575b nodeName:}" failed. No retries permitted until 2025-12-01 20:20:00.515920019 +0000 UTC m=+920.443001436 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446t9t22" (UID: "c81cbe79-aa85-4707-a3d6-246bf422575b") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 20:19:52 crc kubenswrapper[4852]: I1201 20:19:52.754792 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lzq9l"]
Dec 01 20:19:52 crc kubenswrapper[4852]: I1201 20:19:52.756151 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:19:52 crc kubenswrapper[4852]: I1201 20:19:52.785505 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lzq9l"]
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.253517 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.253593 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-catalog-content\") pod \"certified-operators-lzq9l\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.253642 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jtmh\" (UniqueName: \"kubernetes.io/projected/ca3401d4-95a5-47a0-bedf-56036f56a819-kube-api-access-8jtmh\") pod \"certified-operators-lzq9l\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.253760 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.253813 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-utilities\") pod \"certified-operators-lzq9l\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:19:53 crc kubenswrapper[4852]: E1201 20:19:53.254554 4852 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 20:19:53 crc kubenswrapper[4852]: E1201 20:19:53.254635 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:20:01.254612413 +0000 UTC m=+921.181693870 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "webhook-server-cert" not found
Dec 01 20:19:53 crc kubenswrapper[4852]: E1201 20:19:53.255144 4852 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 20:19:53 crc kubenswrapper[4852]: E1201 20:19:53.255169 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:20:01.255160851 +0000 UTC m=+921.182242278 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "metrics-server-cert" not found
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.355603 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-catalog-content\") pod \"certified-operators-lzq9l\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.355663 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jtmh\" (UniqueName: \"kubernetes.io/projected/ca3401d4-95a5-47a0-bedf-56036f56a819-kube-api-access-8jtmh\") pod \"certified-operators-lzq9l\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.355789 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-utilities\") pod \"certified-operators-lzq9l\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.356233 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-utilities\") pod \"certified-operators-lzq9l\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.356475 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-catalog-content\") pod \"certified-operators-lzq9l\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.408903 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jtmh\" (UniqueName: \"kubernetes.io/projected/ca3401d4-95a5-47a0-bedf-56036f56a819-kube-api-access-8jtmh\") pod \"certified-operators-lzq9l\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:19:53 crc kubenswrapper[4852]: I1201 20:19:53.682968 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lzq9l"
Dec 01 20:20:00 crc kubenswrapper[4852]: I1201 20:20:00.062577 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr"
Dec 01 20:20:00 crc kubenswrapper[4852]: I1201 20:20:00.068800 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2869063-cc44-4cd4-b1f6-5b33a5250e77-cert\") pod \"infra-operator-controller-manager-57548d458d-7w8xr\" (UID: \"d2869063-cc44-4cd4-b1f6-5b33a5250e77\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr"
Dec 01 20:20:00 crc kubenswrapper[4852]: I1201 20:20:00.320002 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr"
Dec 01 20:20:00 crc kubenswrapper[4852]: I1201 20:20:00.574913 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:20:00 crc kubenswrapper[4852]: I1201 20:20:00.583038 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c81cbe79-aa85-4707-a3d6-246bf422575b-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446t9t22\" (UID: \"c81cbe79-aa85-4707-a3d6-246bf422575b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:20:00 crc kubenswrapper[4852]: I1201 20:20:00.594860 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"
Dec 01 20:20:01 crc kubenswrapper[4852]: I1201 20:20:01.287731 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:20:01 crc kubenswrapper[4852]: I1201 20:20:01.288241 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"
Dec 01 20:20:01 crc kubenswrapper[4852]: E1201 20:20:01.288064 4852 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 20:20:01 crc kubenswrapper[4852]: E1201 20:20:01.288444 4852 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 20:20:01 crc kubenswrapper[4852]: E1201 20:20:01.288490 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:20:17.288438576 +0000 UTC m=+937.215520003 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "metrics-server-cert" not found
Dec 01 20:20:01 crc kubenswrapper[4852]: E1201 20:20:01.288553 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs podName:980395ee-3c8d-41a7-9663-7bc33fb4cd46 nodeName:}" failed. No retries permitted until 2025-12-01 20:20:17.288528779 +0000 UTC m=+937.215610206 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "webhook-server-cert" not found
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-qvhk9" (UID: "980395ee-3c8d-41a7-9663-7bc33fb4cd46") : secret "webhook-server-cert" not found Dec 01 20:20:02 crc kubenswrapper[4852]: E1201 20:20:02.449398 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:ecf7be921850bdc04697ed1b332bab39ad2a64e4e45c2a445c04f9bae6ac61b5" Dec 01 20:20:02 crc kubenswrapper[4852]: E1201 20:20:02.449941 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:ecf7be921850bdc04697ed1b332bab39ad2a64e4e45c2a445c04f9bae6ac61b5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rlrzz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-6546668bfd-lz6m7_openstack-operators(4908b6e5-acd8-4754-877f-18a3b8897aa5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:20:03 crc kubenswrapper[4852]: E1201 20:20:03.048792 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 01 20:20:03 crc 
kubenswrapper[4852]: E1201 20:20:03.048968 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6q45d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-tdw8l_openstack-operators(2842a3ca-0708-4395-babd-b9dbdc1509d8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:20:03 crc kubenswrapper[4852]: E1201 20:20:03.603993 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7" Dec 01 20:20:03 crc kubenswrapper[4852]: E1201 20:20:03.604162 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2m6gs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-rpswz_openstack-operators(6ded5e30-894b-4718-b10e-6cdcf29ea854): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:20:04 crc kubenswrapper[4852]: E1201 20:20:04.228743 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 01 20:20:04 crc kubenswrapper[4852]: E1201 20:20:04.229011 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-t9t24,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-2phng_openstack-operators(cc6a88c3-1e45-470c-ba3b-c15c83afbcec): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:20:04 crc kubenswrapper[4852]: E1201 20:20:04.784214 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621" Dec 01 20:20:04 crc kubenswrapper[4852]: E1201 20:20:04.784415 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8wpts,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-lslxr_openstack-operators(1ae195c5-0850-4ca5-85e4-abc7ac4d79dc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:20:05 crc kubenswrapper[4852]: E1201 20:20:05.383393 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 01 20:20:05 crc kubenswrapper[4852]: E1201 20:20:05.383631 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gq4z4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-5mktq_openstack-operators(4ec2c5f6-679b-4f91-ab45-3eae7b12cd13): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:20:09 crc kubenswrapper[4852]: I1201 20:20:09.413163 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-95kl4" event={"ID":"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e","Type":"ContainerStarted","Data":"1f9b8b4b8aae1471c3fd8070e43184080720c3bb07594a00c345aa2362731234"} Dec 01 20:20:09 crc kubenswrapper[4852]: I1201 20:20:09.422815 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lzq9l"] Dec 01 20:20:09 crc kubenswrapper[4852]: I1201 20:20:09.686222 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr"] Dec 01 20:20:09 crc kubenswrapper[4852]: I1201 20:20:09.750343 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22"] Dec 01 20:20:10 crc kubenswrapper[4852]: W1201 20:20:10.201222 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc81cbe79_aa85_4707_a3d6_246bf422575b.slice/crio-bc2d331503189214da270d345fcd2977ba5225ecb6c9567d4e808f707c7dd5a7 WatchSource:0}: Error finding container bc2d331503189214da270d345fcd2977ba5225ecb6c9567d4e808f707c7dd5a7: Status 404 returned error can't find the container with id bc2d331503189214da270d345fcd2977ba5225ecb6c9567d4e808f707c7dd5a7 Dec 01 20:20:10 crc kubenswrapper[4852]: W1201 20:20:10.206890 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca3401d4_95a5_47a0_bedf_56036f56a819.slice/crio-91cab9106ed1c2622473698efe0c88fef36b3c056002641bf5dc650c56a23fd0 WatchSource:0}: Error finding container 91cab9106ed1c2622473698efe0c88fef36b3c056002641bf5dc650c56a23fd0: Status 404 returned error can't find the container with id 91cab9106ed1c2622473698efe0c88fef36b3c056002641bf5dc650c56a23fd0 Dec 01 20:20:10 crc kubenswrapper[4852]: W1201 20:20:10.208032 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2869063_cc44_4cd4_b1f6_5b33a5250e77.slice/crio-2c1f6119942457c94a6cca02e54ee568e9c491d9bd61de3d66f70637ab73e9e3 WatchSource:0}: Error finding container 2c1f6119942457c94a6cca02e54ee568e9c491d9bd61de3d66f70637ab73e9e3: Status 404 returned error can't find the container with id 2c1f6119942457c94a6cca02e54ee568e9c491d9bd61de3d66f70637ab73e9e3 Dec 01 
20:20:10 crc kubenswrapper[4852]: I1201 20:20:10.422347 4852 generic.go:334] "Generic (PLEG): container finished" podID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerID="1f9b8b4b8aae1471c3fd8070e43184080720c3bb07594a00c345aa2362731234" exitCode=0 Dec 01 20:20:10 crc kubenswrapper[4852]: I1201 20:20:10.422417 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-95kl4" event={"ID":"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e","Type":"ContainerDied","Data":"1f9b8b4b8aae1471c3fd8070e43184080720c3bb07594a00c345aa2362731234"} Dec 01 20:20:10 crc kubenswrapper[4852]: I1201 20:20:10.425763 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm" event={"ID":"b9f7343b-9bba-43e0-bb25-80a5f3fb139f","Type":"ContainerStarted","Data":"1e153214ddec0e779a90fe0e094ed0c6afbb83d77760f6ace10850a245a58e17"} Dec 01 20:20:10 crc kubenswrapper[4852]: I1201 20:20:10.427679 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lzq9l" event={"ID":"ca3401d4-95a5-47a0-bedf-56036f56a819","Type":"ContainerStarted","Data":"91cab9106ed1c2622473698efe0c88fef36b3c056002641bf5dc650c56a23fd0"} Dec 01 20:20:10 crc kubenswrapper[4852]: I1201 20:20:10.429049 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22" event={"ID":"c81cbe79-aa85-4707-a3d6-246bf422575b","Type":"ContainerStarted","Data":"bc2d331503189214da270d345fcd2977ba5225ecb6c9567d4e808f707c7dd5a7"} Dec 01 20:20:10 crc kubenswrapper[4852]: I1201 20:20:10.430451 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" event={"ID":"7d884d8c-acfc-47fe-bee2-f0248f8b0eea","Type":"ContainerStarted","Data":"04a5070def86215b5fdf788d9610141a270fcab1104feeb825892dccb506b309"} Dec 01 20:20:10 crc kubenswrapper[4852]: I1201 20:20:10.431755 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" event={"ID":"d2869063-cc44-4cd4-b1f6-5b33a5250e77","Type":"ContainerStarted","Data":"2c1f6119942457c94a6cca02e54ee568e9c491d9bd61de3d66f70637ab73e9e3"} Dec 01 20:20:11 crc kubenswrapper[4852]: I1201 20:20:11.456935 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt" event={"ID":"efb3ff96-731f-4a01-8bed-636717f36fb4","Type":"ContainerStarted","Data":"ffecfe8597b06f6c91311c64d48d505c092768b66dd534e137a7fe0dfe6bb053"} Dec 01 20:20:11 crc kubenswrapper[4852]: I1201 20:20:11.462270 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl" event={"ID":"fa3d71fd-60b1-488c-9ae0-abb212b8d0a4","Type":"ContainerStarted","Data":"9988de42c44923a5649e734aceee14328b3484d09881d52e6c547523c01ab502"} Dec 01 20:20:11 crc kubenswrapper[4852]: I1201 20:20:11.464760 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb" event={"ID":"b7818ab0-4a52-48fe-a0c8-88d162745762","Type":"ContainerStarted","Data":"77f0919a0cd489557f2eeb06b0670e38c67d8b90d660d91a5947bfb918d45d9e"} Dec 01 20:20:11 crc kubenswrapper[4852]: I1201 20:20:11.467580 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" 
event={"ID":"152c7905-249d-4195-afe6-7b02b5d8267d","Type":"ContainerStarted","Data":"d26e46fea7af3b64f67e865bbc7a19180d0425886d5556c9688e5ec6016a7433"} Dec 01 20:20:11 crc kubenswrapper[4852]: I1201 20:20:11.471266 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2" event={"ID":"e3a2d94e-61fb-406b-be5d-4ae5f0c18fda","Type":"ContainerStarted","Data":"1295439efeaffed13a690f60e814aa5c37ab301b32a2fbcad514b02cea0de281"} Dec 01 20:20:11 crc kubenswrapper[4852]: I1201 20:20:11.474992 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg" event={"ID":"714caded-89c7-44a3-a832-2fbaa0e00ac2","Type":"ContainerStarted","Data":"622b226dffb19c92439524b516eb10cdb7eb1b1d347f45c790b8b2e9248fce60"} Dec 01 20:20:12 crc kubenswrapper[4852]: I1201 20:20:12.501568 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" event={"ID":"3aa88cab-a21d-40d4-b278-8c006ce138ff","Type":"ContainerStarted","Data":"5036ff2785b29dd616fe0f42cd034f1271ca3f3a2a6a5c8ef1d0af1fb418c565"} Dec 01 20:20:12 crc kubenswrapper[4852]: I1201 20:20:12.509954 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb" event={"ID":"8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca","Type":"ContainerStarted","Data":"f5e404899ed179f8d086dc3b7ce0e413698fbb69825babdd3c77f2413b2e3e33"} Dec 01 20:20:12 crc kubenswrapper[4852]: I1201 20:20:12.512235 4852 generic.go:334] "Generic (PLEG): container finished" podID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerID="31850711fc9d30f646c46bdf315c24a2aea4af0f00686dc98962694261724e27" exitCode=0 Dec 01 20:20:12 crc kubenswrapper[4852]: I1201 20:20:12.512290 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lzq9l" event={"ID":"ca3401d4-95a5-47a0-bedf-56036f56a819","Type":"ContainerDied","Data":"31850711fc9d30f646c46bdf315c24a2aea4af0f00686dc98962694261724e27"} Dec 01 20:20:13 crc kubenswrapper[4852]: I1201 20:20:13.520895 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" event={"ID":"7240ba3b-5f4b-4c63-99cf-4fe68d720fb5","Type":"ContainerStarted","Data":"bd48d8b4458275d96a1e566c22cc51abb011d856177a2fb77e2adc96a787bf5d"} Dec 01 20:20:13 crc kubenswrapper[4852]: I1201 20:20:13.524525 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g" event={"ID":"268f049d-790e-4b1f-958d-0f07ba335215","Type":"ContainerStarted","Data":"29cbf6ef89b31642b4f8b995d3ec5b4462e19939da3b2a1ffcb528860d24da91"} Dec 01 20:20:14 crc kubenswrapper[4852]: I1201 20:20:14.538492 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc" event={"ID":"35e98bd5-b71c-4842-9511-52b5c9d8e25a","Type":"ContainerStarted","Data":"74c49cb6c66261bc420f8d96858356fe6640b0b8c26d848c661ac3855f22f0b8"} Dec 01 20:20:14 crc kubenswrapper[4852]: I1201 20:20:14.548753 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" event={"ID":"28cd4665-305c-4855-87c6-f267402d0b05","Type":"ContainerStarted","Data":"922318611b412ec23e5a575a389e181e57a18be56f5b2d73b1554c5319ec7b4f"} Dec 01 20:20:14 crc 
kubenswrapper[4852]: E1201 20:20:14.971080 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l" podUID="2842a3ca-0708-4395-babd-b9dbdc1509d8" Dec 01 20:20:14 crc kubenswrapper[4852]: E1201 20:20:14.971418 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr" podUID="1ae195c5-0850-4ca5-85e4-abc7ac4d79dc" Dec 01 20:20:15 crc kubenswrapper[4852]: E1201 20:20:15.349537 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz" podUID="6ded5e30-894b-4718-b10e-6cdcf29ea854" Dec 01 20:20:15 crc kubenswrapper[4852]: E1201 20:20:15.354146 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng" podUID="cc6a88c3-1e45-470c-ba3b-c15c83afbcec" Dec 01 20:20:15 crc kubenswrapper[4852]: E1201 20:20:15.354207 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq" podUID="4ec2c5f6-679b-4f91-ab45-3eae7b12cd13" Dec 01 20:20:15 crc kubenswrapper[4852]: E1201 20:20:15.356222 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7" podUID="4908b6e5-acd8-4754-877f-18a3b8897aa5" Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.567632 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-95kl4" event={"ID":"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e","Type":"ContainerStarted","Data":"2284a0a6685420f29bbbb4be42f5dda16303ebdff5fc733340160a4e64401fdd"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.570330 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7" event={"ID":"4908b6e5-acd8-4754-877f-18a3b8897aa5","Type":"ContainerStarted","Data":"1e3439e0ae48bd9c291b94bff3b58dc2640068a2b1c93f96f391d1682c1bfa05"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.572152 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr" event={"ID":"1ae195c5-0850-4ca5-85e4-abc7ac4d79dc","Type":"ContainerStarted","Data":"33bd3fe2b86677569322bdcfcd662485e594ef401b04bcce412b39f2388d0e45"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.574759 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng" 
event={"ID":"cc6a88c3-1e45-470c-ba3b-c15c83afbcec","Type":"ContainerStarted","Data":"f9d8fda929bbbb9368e7aeacb6bafda19cd3f8815b9b723aca56b5ece7a53d6f"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.599238 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-95kl4" podStartSLOduration=4.317681166 podStartE2EDuration="33.599209716s" podCreationTimestamp="2025-12-01 20:19:42 +0000 UTC" firstStartedPulling="2025-12-01 20:19:45.087089203 +0000 UTC m=+905.014170620" lastFinishedPulling="2025-12-01 20:20:14.368617743 +0000 UTC m=+934.295699170" observedRunningTime="2025-12-01 20:20:15.593659882 +0000 UTC m=+935.520741309" watchObservedRunningTime="2025-12-01 20:20:15.599209716 +0000 UTC m=+935.526291133" Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.600695 4852 generic.go:334] "Generic (PLEG): container finished" podID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerID="1859f651d474e168d33cc2e38d7085e1d63afb4e49bf70bf06c4f0e3b41af9d4" exitCode=0 Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.600826 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lzq9l" event={"ID":"ca3401d4-95a5-47a0-bedf-56036f56a819","Type":"ContainerDied","Data":"1859f651d474e168d33cc2e38d7085e1d63afb4e49bf70bf06c4f0e3b41af9d4"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.618508 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq" event={"ID":"4ec2c5f6-679b-4f91-ab45-3eae7b12cd13","Type":"ContainerStarted","Data":"5682219a7da4e0d60710a0e1a7aa6ae08bf4a8d151c4668d690e04020ac74d10"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.636751 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" event={"ID":"28cd4665-305c-4855-87c6-f267402d0b05","Type":"ContainerStarted","Data":"72a56cef3b7af39742c20f37941de084daedc656eb5e28277cab9707129b9930"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.637699 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.639793 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" event={"ID":"d2869063-cc44-4cd4-b1f6-5b33a5250e77","Type":"ContainerStarted","Data":"eeee96d1fc6580879d7343b423e4f1539e270adb9566b55d571e356e25799d13"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.646561 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz" event={"ID":"6ded5e30-894b-4718-b10e-6cdcf29ea854","Type":"ContainerStarted","Data":"9c2fde5d3e839b79af03003b42b6a312bbcd65a3984b0bcbbc9caeb2d0022edb"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.662504 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l" event={"ID":"2842a3ca-0708-4395-babd-b9dbdc1509d8","Type":"ContainerStarted","Data":"661adb060cfb5242b69709eee5ce020e4eff867d8f11d195667d078a11f6acc8"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.679473 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22" 
event={"ID":"c81cbe79-aa85-4707-a3d6-246bf422575b","Type":"ContainerStarted","Data":"3992560cfc3219ee89d983cfb2194e73966d813b28fe0d1066d14b84a99bd051"} Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.742605 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" podStartSLOduration=7.114601589 podStartE2EDuration="31.742583835s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.072511631 +0000 UTC m=+905.999593048" lastFinishedPulling="2025-12-01 20:20:10.700493877 +0000 UTC m=+930.627575294" observedRunningTime="2025-12-01 20:20:15.729579107 +0000 UTC m=+935.656660524" watchObservedRunningTime="2025-12-01 20:20:15.742583835 +0000 UTC m=+935.669665252" Dec 01 20:20:15 crc kubenswrapper[4852]: I1201 20:20:15.861855 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pzvgc" podStartSLOduration=7.237138575 podStartE2EDuration="31.861827288s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.154905457 +0000 UTC m=+906.081986874" lastFinishedPulling="2025-12-01 20:20:10.77959417 +0000 UTC m=+930.706675587" observedRunningTime="2025-12-01 20:20:15.858818933 +0000 UTC m=+935.785900350" watchObservedRunningTime="2025-12-01 20:20:15.861827288 +0000 UTC m=+935.788908705" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.704704 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl" event={"ID":"fa3d71fd-60b1-488c-9ae0-abb212b8d0a4","Type":"ContainerStarted","Data":"c8baaba1d5a691edf00205ec00e8db29979d4c2c40e51eba7d4d8ed79f73b322"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.705861 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.712168 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.717657 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" event={"ID":"d2869063-cc44-4cd4-b1f6-5b33a5250e77","Type":"ContainerStarted","Data":"c57c9a04fb047af829dc79113aaebc270513e62aa6a8488096442c9080ec44ab"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.717960 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.726801 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz" event={"ID":"6ded5e30-894b-4718-b10e-6cdcf29ea854","Type":"ContainerStarted","Data":"8bf118026d90a0b5024dec39398270148484489761ee394f2fc7d4b10af37b40"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.727113 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.738233 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q7fhl" podStartSLOduration=4.427038193 podStartE2EDuration="33.738207043s" podCreationTimestamp="2025-12-01 20:19:43 +0000 UTC" firstStartedPulling="2025-12-01 20:19:45.374726191 +0000 UTC m=+905.301807608" lastFinishedPulling="2025-12-01 20:20:14.685895031 +0000 UTC m=+934.612976458" observedRunningTime="2025-12-01 20:20:16.729687026 +0000 UTC m=+936.656768443" watchObservedRunningTime="2025-12-01 20:20:16.738207043 +0000 UTC m=+936.665288460" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.741077 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq" event={"ID":"4ec2c5f6-679b-4f91-ab45-3eae7b12cd13","Type":"ContainerStarted","Data":"78dc90609783e0c000b8b12a58f710e53861771885987c4b61a3f673de0bb746"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.742038 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.763425 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" event={"ID":"7d884d8c-acfc-47fe-bee2-f0248f8b0eea","Type":"ContainerStarted","Data":"29a5882b21c0e77cef395cf2dc020f1283bd7b5d619327646aafaa5c19acbd1a"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.764881 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.770142 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.771669 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" event={"ID":"3aa88cab-a21d-40d4-b278-8c006ce138ff","Type":"ContainerStarted","Data":"ad023659d5d2066e2df51b841afc01906e658f3c789a1053d6face59cad41183"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.775650 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.792732 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm" event={"ID":"b9f7343b-9bba-43e0-bb25-80a5f3fb139f","Type":"ContainerStarted","Data":"0cb7e2e69428315c033240c7f0758f886576886a1e7a61e666a88491000b3b84"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.793757 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.803886 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.821550 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22" event={"ID":"c81cbe79-aa85-4707-a3d6-246bf422575b","Type":"ContainerStarted","Data":"2d0f4b59cec0219f56cc41eb8f576ee840f6de1e4da12be7c37282f78790c8f5"} Dec 01 
20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.822308 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.843643 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" podStartSLOduration=29.714705725 podStartE2EDuration="33.843625752s" podCreationTimestamp="2025-12-01 20:19:43 +0000 UTC" firstStartedPulling="2025-12-01 20:20:10.239782648 +0000 UTC m=+930.166864075" lastFinishedPulling="2025-12-01 20:20:14.368702685 +0000 UTC m=+934.295784102" observedRunningTime="2025-12-01 20:20:16.804108431 +0000 UTC m=+936.731189858" watchObservedRunningTime="2025-12-01 20:20:16.843625752 +0000 UTC m=+936.770707169" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.844835 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz" podStartSLOduration=2.557120701 podStartE2EDuration="32.84483086s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.020260981 +0000 UTC m=+905.947342398" lastFinishedPulling="2025-12-01 20:20:16.30797114 +0000 UTC m=+936.235052557" observedRunningTime="2025-12-01 20:20:16.842120415 +0000 UTC m=+936.769201832" watchObservedRunningTime="2025-12-01 20:20:16.84483086 +0000 UTC m=+936.771912277" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.849413 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" event={"ID":"7240ba3b-5f4b-4c63-99cf-4fe68d720fb5","Type":"ContainerStarted","Data":"413bd6b6aa831c7d82da869d0ee126182c9ce43c883bd1f10426dcd1d6f6e812"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.849748 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.878942 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt" event={"ID":"efb3ff96-731f-4a01-8bed-636717f36fb4","Type":"ContainerStarted","Data":"f836b57af00e358af0766a6cd1ccc82399ce7eeb2a0ca56f0fbf3ccb92bc3cf2"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.878999 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.878981 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" podStartSLOduration=4.237833619 podStartE2EDuration="32.87896118s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.050250972 +0000 UTC m=+905.977332399" lastFinishedPulling="2025-12-01 20:20:14.691378543 +0000 UTC m=+934.618459960" observedRunningTime="2025-12-01 20:20:16.876822083 +0000 UTC m=+936.803903500" watchObservedRunningTime="2025-12-01 20:20:16.87896118 +0000 UTC m=+936.806042597" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.885717 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 
20:20:16.894594 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g" event={"ID":"268f049d-790e-4b1f-958d-0f07ba335215","Type":"ContainerStarted","Data":"b2b51ea77bbd93f65958267e5e8601b5e27bba363b1f23e5396af1a7dd8a71a5"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.895539 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.907770 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng" event={"ID":"cc6a88c3-1e45-470c-ba3b-c15c83afbcec","Type":"ContainerStarted","Data":"142ca34a7fdcdcd7f51f3ad0a3c991b5522a1935389267151cb854e3f7356047"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.908700 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.930717 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb" event={"ID":"8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca","Type":"ContainerStarted","Data":"1df2f5a2a27c6a44f3599c43a050a7d006984c82605925e1ee1bbe01e429f936"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.932073 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.937637 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq" podStartSLOduration=2.434062348 podStartE2EDuration="32.937615891s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:45.690430459 +0000 UTC m=+905.617511876" lastFinishedPulling="2025-12-01 20:20:16.193984002 +0000 UTC m=+936.121065419" observedRunningTime="2025-12-01 20:20:16.937570591 +0000 UTC m=+936.864652008" watchObservedRunningTime="2025-12-01 20:20:16.937615891 +0000 UTC m=+936.864697308" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.938666 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bd4jr" podStartSLOduration=4.26447972 podStartE2EDuration="33.938660974s" podCreationTimestamp="2025-12-01 20:19:43 +0000 UTC" firstStartedPulling="2025-12-01 20:19:45.333389993 +0000 UTC m=+905.260471410" lastFinishedPulling="2025-12-01 20:20:15.007571237 +0000 UTC m=+934.934652664" observedRunningTime="2025-12-01 20:20:16.90697487 +0000 UTC m=+936.834056287" watchObservedRunningTime="2025-12-01 20:20:16.938660974 +0000 UTC m=+936.865742391" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.939005 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.968019 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2" event={"ID":"e3a2d94e-61fb-406b-be5d-4ae5f0c18fda","Type":"ContainerStarted","Data":"b5158d9c2eccebe1f6d3860db5bd4ecbb3583c416667379594eb542a953dd956"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 
20:20:16.968972 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.982916 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2" Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.993014 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb" event={"ID":"b7818ab0-4a52-48fe-a0c8-88d162745762","Type":"ContainerStarted","Data":"7ce4aea51444c8356404b57247014f0b4f117322c9cd8bb4bad2123b7ff9e9a1"} Dec 01 20:20:16 crc kubenswrapper[4852]: I1201 20:20:16.994229 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.000730 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" event={"ID":"152c7905-249d-4195-afe6-7b02b5d8267d","Type":"ContainerStarted","Data":"4f862a6a8ca6cd49def9f75361d3ba8c0dbdea334953ae24038f98e56b25dc09"} Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.001919 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.002156 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.003267 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22" podStartSLOduration=28.872172657 podStartE2EDuration="33.003242191s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:20:10.239762358 +0000 UTC m=+930.166843775" lastFinishedPulling="2025-12-01 20:20:14.370831892 +0000 UTC m=+934.297913309" observedRunningTime="2025-12-01 20:20:16.992626868 +0000 UTC m=+936.919708295" watchObservedRunningTime="2025-12-01 20:20:17.003242191 +0000 UTC m=+936.930323608" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.006501 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg" event={"ID":"714caded-89c7-44a3-a832-2fbaa0e00ac2","Type":"ContainerStarted","Data":"1f1242c01a632d9e66626a8dc4172f08af309ed6663e46a8ca574501de513494"} Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.017583 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-z7gzm" podStartSLOduration=4.689844481 podStartE2EDuration="34.017561871s" podCreationTimestamp="2025-12-01 20:19:43 +0000 UTC" firstStartedPulling="2025-12-01 20:19:45.358822221 +0000 UTC m=+905.285903638" lastFinishedPulling="2025-12-01 20:20:14.686539611 +0000 UTC m=+934.613621028" observedRunningTime="2025-12-01 20:20:17.014131633 +0000 UTC m=+936.941213050" watchObservedRunningTime="2025-12-01 20:20:17.017561871 +0000 UTC m=+936.944643278" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.020877 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.073434 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ktgl2" podStartSLOduration=4.497328155 podStartE2EDuration="33.073414064s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.009194834 +0000 UTC m=+905.936276261" lastFinishedPulling="2025-12-01 20:20:14.585280753 +0000 UTC m=+934.512362170" observedRunningTime="2025-12-01 20:20:17.064846425 +0000 UTC m=+936.991927842" watchObservedRunningTime="2025-12-01 20:20:17.073414064 +0000 UTC m=+937.000495471" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.137707 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-bzkqb" podStartSLOduration=4.218100821 podStartE2EDuration="33.137683361s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.06929143 +0000 UTC m=+905.996372847" lastFinishedPulling="2025-12-01 20:20:14.98887397 +0000 UTC m=+934.915955387" observedRunningTime="2025-12-01 20:20:17.120574324 +0000 UTC m=+937.047655741" watchObservedRunningTime="2025-12-01 20:20:17.137683361 +0000 UTC m=+937.064764768" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.162381 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g" podStartSLOduration=4.5203872579999995 podStartE2EDuration="33.162351835s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.06419894 +0000 UTC m=+905.991280357" lastFinishedPulling="2025-12-01 20:20:14.706163517 +0000 UTC m=+934.633244934" observedRunningTime="2025-12-01 20:20:17.1500886 +0000 UTC m=+937.077170028" watchObservedRunningTime="2025-12-01 20:20:17.162351835 +0000 UTC m=+937.089433252" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.244469 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" podStartSLOduration=4.719394124 podStartE2EDuration="33.244432361s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.16167118 +0000 UTC m=+906.088752597" lastFinishedPulling="2025-12-01 20:20:14.686709417 +0000 UTC m=+934.613790834" observedRunningTime="2025-12-01 20:20:17.205964094 +0000 UTC m=+937.133045511" watchObservedRunningTime="2025-12-01 20:20:17.244432361 +0000 UTC m=+937.171513778" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.273075 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-4rphb" podStartSLOduration=4.294885721 podStartE2EDuration="33.273042999s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:45.706187964 +0000 UTC m=+905.633269381" lastFinishedPulling="2025-12-01 20:20:14.684345242 +0000 UTC m=+934.611426659" observedRunningTime="2025-12-01 20:20:17.237938487 +0000 UTC m=+937.165019904" watchObservedRunningTime="2025-12-01 20:20:17.273042999 +0000 UTC m=+937.200124416" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.321344 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2khxz" podStartSLOduration=4.716500949 podStartE2EDuration="34.321314124s" podCreationTimestamp="2025-12-01 20:19:43 +0000 UTC" firstStartedPulling="2025-12-01 20:19:45.187487535 +0000 UTC m=+905.114568942" lastFinishedPulling="2025-12-01 20:20:14.7923007 +0000 UTC m=+934.719382117" observedRunningTime="2025-12-01 20:20:17.28899345 +0000 UTC m=+937.216074857" watchObservedRunningTime="2025-12-01 20:20:17.321314124 +0000 UTC m=+937.248395541" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.345508 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.345688 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.369991 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.378799 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng" podStartSLOduration=3.237105241 podStartE2EDuration="33.378770907s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.048008232 +0000 UTC m=+905.975089649" lastFinishedPulling="2025-12-01 20:20:16.189673898 +0000 UTC m=+936.116755315" observedRunningTime="2025-12-01 20:20:17.368913558 +0000 UTC m=+937.295994975" watchObservedRunningTime="2025-12-01 20:20:17.378770907 +0000 UTC m=+937.305852324" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.380212 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/980395ee-3c8d-41a7-9663-7bc33fb4cd46-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-qvhk9\" (UID: \"980395ee-3c8d-41a7-9663-7bc33fb4cd46\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.418603 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.476712 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-ccgjt" podStartSLOduration=5.242999273 podStartE2EDuration="34.47667754s" podCreationTimestamp="2025-12-01 20:19:43 +0000 UTC" firstStartedPulling="2025-12-01 20:19:45.554333968 +0000 UTC m=+905.481415395" lastFinishedPulling="2025-12-01 20:20:14.788012245 +0000 UTC m=+934.715093662" observedRunningTime="2025-12-01 20:20:17.472007554 +0000 UTC m=+937.399088971" watchObservedRunningTime="2025-12-01 20:20:17.47667754 +0000 UTC m=+937.403758957" Dec 01 20:20:17 crc kubenswrapper[4852]: I1201 20:20:17.477389 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg" podStartSLOduration=5.452280661 podStartE2EDuration="34.477383203s" podCreationTimestamp="2025-12-01 20:19:43 +0000 UTC" firstStartedPulling="2025-12-01 20:19:45.559220181 +0000 UTC m=+905.486301608" lastFinishedPulling="2025-12-01 20:20:14.584322733 +0000 UTC m=+934.511404150" observedRunningTime="2025-12-01 20:20:17.417117881 +0000 UTC m=+937.344199308" watchObservedRunningTime="2025-12-01 20:20:17.477383203 +0000 UTC m=+937.404464620" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.032574 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l" event={"ID":"2842a3ca-0708-4395-babd-b9dbdc1509d8","Type":"ContainerStarted","Data":"e599f2dd0393424ac63ba17bfb445751ea3a114883837e04010e2f70bdec957e"} Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.033066 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.044318 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7" event={"ID":"4908b6e5-acd8-4754-877f-18a3b8897aa5","Type":"ContainerStarted","Data":"9c6685e0442c2de9db260a95a600e45c85a8b3017d08d4f284fcf3a42cd0e97c"} Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.045284 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.059304 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l" podStartSLOduration=3.484055593 podStartE2EDuration="34.059274915s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.02150064 +0000 UTC m=+905.948582057" lastFinishedPulling="2025-12-01 20:20:16.596719962 +0000 UTC m=+936.523801379" observedRunningTime="2025-12-01 20:20:18.050478189 +0000 UTC m=+937.977559606" watchObservedRunningTime="2025-12-01 20:20:18.059274915 +0000 UTC m=+937.986356332" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.059494 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr" event={"ID":"1ae195c5-0850-4ca5-85e4-abc7ac4d79dc","Type":"ContainerStarted","Data":"a29f9c132b6dd29450ace5dcdad011bb849a719f46297920d7b9179c49192536"} Dec 01 
20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.059540 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.062796 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.066876 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-gtsjg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.067076 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-hncmj" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.067656 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-nh9t8" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.068130 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-blr2g" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.082949 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7" podStartSLOduration=3.695936143 podStartE2EDuration="34.082920977s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.04856696 +0000 UTC m=+905.975648367" lastFinishedPulling="2025-12-01 20:20:16.435551784 +0000 UTC m=+936.362633201" observedRunningTime="2025-12-01 20:20:18.079892172 +0000 UTC m=+938.006973589" watchObservedRunningTime="2025-12-01 20:20:18.082920977 +0000 UTC m=+938.010002394" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.210580 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr" podStartSLOduration=3.921168573 podStartE2EDuration="34.210551533s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="2025-12-01 20:19:46.150581652 +0000 UTC m=+906.077663069" lastFinishedPulling="2025-12-01 20:20:16.439964612 +0000 UTC m=+936.367046029" observedRunningTime="2025-12-01 20:20:18.207931011 +0000 UTC m=+938.135012428" watchObservedRunningTime="2025-12-01 20:20:18.210551533 +0000 UTC m=+938.137632950" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.341879 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pqbpg"] Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.344200 4852 util.go:30] "No sandbox for pod can be found. 
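[editor's note] The "SyncLoop (PLEG)" entries come from the Pod Lifecycle Event Generator, which relists containers from the runtime and diffs the result against its cached state to emit ContainerStarted/ContainerDied events; the "SyncLoop (probe)" entries are the probe workers reporting result transitions (an empty status means the result is not yet known). A toy relist diff under that reading follows; State and the event strings are simplified stand-ins for the kubelet's internal types.

```go
package main

import "fmt"

// relist compares the previous and current container states and emits
// Started/Died events, in the spirit of the PLEG entries above.
type State string

const (
	Running State = "running"
	Exited  State = "exited"
)

func relist(old, cur map[string]State) []string {
	var events []string
	for id, s := range cur {
		prev, seen := old[id]
		switch {
		case s == Running && (!seen || prev != Running):
			events = append(events, "ContainerStarted "+id)
		case s == Exited && seen && prev == Running:
			events = append(events, "ContainerDied "+id)
		}
	}
	return events
}

func main() {
	old := map[string]State{"e599f2dd": Running}
	cur := map[string]State{"e599f2dd": Exited, "9c6685e0": Running}
	for _, e := range relist(old, cur) {
		fmt.Println(e) // order is map-iteration order, i.e. nondeterministic
	}
}
```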
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.362854 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqbpg"] Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.464680 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-utilities\") pod \"redhat-marketplace-pqbpg\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.464841 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ssvx\" (UniqueName: \"kubernetes.io/projected/9127a421-1aec-41a6-a358-2744b581eab3-kube-api-access-8ssvx\") pod \"redhat-marketplace-pqbpg\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.464908 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-catalog-content\") pod \"redhat-marketplace-pqbpg\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.566437 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-catalog-content\") pod \"redhat-marketplace-pqbpg\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.566568 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-utilities\") pod \"redhat-marketplace-pqbpg\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.566607 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ssvx\" (UniqueName: \"kubernetes.io/projected/9127a421-1aec-41a6-a358-2744b581eab3-kube-api-access-8ssvx\") pod \"redhat-marketplace-pqbpg\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.567629 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-utilities\") pod \"redhat-marketplace-pqbpg\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.567912 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-catalog-content\") pod \"redhat-marketplace-pqbpg\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.591580 4852 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8ssvx\" (UniqueName: \"kubernetes.io/projected/9127a421-1aec-41a6-a358-2744b581eab3-kube-api-access-8ssvx\") pod \"redhat-marketplace-pqbpg\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.673781 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:18 crc kubenswrapper[4852]: W1201 20:20:18.887987 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod980395ee_3c8d_41a7_9663_7bc33fb4cd46.slice/crio-ee305a63e1d07cf594fd91e74e42b060b50507160a64fed7dae30e148341c75b WatchSource:0}: Error finding container ee305a63e1d07cf594fd91e74e42b060b50507160a64fed7dae30e148341c75b: Status 404 returned error can't find the container with id ee305a63e1d07cf594fd91e74e42b060b50507160a64fed7dae30e148341c75b Dec 01 20:20:18 crc kubenswrapper[4852]: I1201 20:20:18.888650 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9"] Dec 01 20:20:19 crc kubenswrapper[4852]: I1201 20:20:19.015594 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqbpg"] Dec 01 20:20:19 crc kubenswrapper[4852]: W1201 20:20:19.032692 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9127a421_1aec_41a6_a358_2744b581eab3.slice/crio-93f096ec785d4f5218185b1507ded01ad4a30164ecfe110ca7da49763a967e19 WatchSource:0}: Error finding container 93f096ec785d4f5218185b1507ded01ad4a30164ecfe110ca7da49763a967e19: Status 404 returned error can't find the container with id 93f096ec785d4f5218185b1507ded01ad4a30164ecfe110ca7da49763a967e19 Dec 01 20:20:19 crc kubenswrapper[4852]: I1201 20:20:19.074607 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqbpg" event={"ID":"9127a421-1aec-41a6-a358-2744b581eab3","Type":"ContainerStarted","Data":"93f096ec785d4f5218185b1507ded01ad4a30164ecfe110ca7da49763a967e19"} Dec 01 20:20:19 crc kubenswrapper[4852]: I1201 20:20:19.088903 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" event={"ID":"980395ee-3c8d-41a7-9663-7bc33fb4cd46","Type":"ContainerStarted","Data":"ee305a63e1d07cf594fd91e74e42b060b50507160a64fed7dae30e148341c75b"} Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.099963 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqbpg" event={"ID":"9127a421-1aec-41a6-a358-2744b581eab3","Type":"ContainerStarted","Data":"b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9"} Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.103865 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lzq9l" event={"ID":"ca3401d4-95a5-47a0-bedf-56036f56a819","Type":"ContainerStarted","Data":"a749ca79f189c363c0fb7b0ebe99d904c2b9bca3cb6e4446abd01ece6594e326"} Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.106542 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" 
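[editor's note] The reconciler_common entries above trace the volume manager's desired-state/actual-state loop: each volume is first verified as attached (VerifyControllerAttachedVolume), then mounted (MountVolume started, then MountVolume.SetUp succeeded); for emptyDir and projected volumes the attach step is a formality, since there is nothing to attach. A schematic pass under that reading; the function and map shapes are illustrative, not the kubelet's API.

```go
package main

import "fmt"

// reconcile mounts anything desired but not yet mounted and unmounts
// anything mounted but no longer desired, echoing the log messages above.
func reconcile(desired, mounted map[string]bool) {
	for vol := range desired {
		if !mounted[vol] {
			fmt.Println("VerifyControllerAttachedVolume started for", vol)
			fmt.Println("MountVolume started for", vol)
			mounted[vol] = true
			fmt.Println("MountVolume.SetUp succeeded for", vol)
		}
	}
	for vol := range mounted {
		if !desired[vol] {
			fmt.Println("UnmountVolume started for", vol)
			delete(mounted, vol)
		}
	}
}

func main() {
	// The three volumes of the redhat-marketplace-pqbpg pod above.
	desired := map[string]bool{
		"utilities":             true,
		"catalog-content":       true,
		"kube-api-access-8ssvx": true,
	}
	reconcile(desired, map[string]bool{})
}
```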
event={"ID":"980395ee-3c8d-41a7-9663-7bc33fb4cd46","Type":"ContainerStarted","Data":"960facb2720ae41b025d9f75e063b4545e4a5e39c777e017122ea6dac143a175"} Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.106959 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.157205 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lzq9l" podStartSLOduration=24.356529023 podStartE2EDuration="28.157179808s" podCreationTimestamp="2025-12-01 20:19:52 +0000 UTC" firstStartedPulling="2025-12-01 20:20:12.934937186 +0000 UTC m=+932.862018603" lastFinishedPulling="2025-12-01 20:20:16.735587981 +0000 UTC m=+936.662669388" observedRunningTime="2025-12-01 20:20:20.150318123 +0000 UTC m=+940.077399560" watchObservedRunningTime="2025-12-01 20:20:20.157179808 +0000 UTC m=+940.084261225" Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.184401 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" podStartSLOduration=36.184371582 podStartE2EDuration="36.184371582s" podCreationTimestamp="2025-12-01 20:19:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:20:20.179148058 +0000 UTC m=+940.106229485" watchObservedRunningTime="2025-12-01 20:20:20.184371582 +0000 UTC m=+940.111453009" Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.229534 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.229618 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.229682 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.230718 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6f73ea9db4bc154e679740cf30c147a3e0cc18bfea0a3cba718640a8472b3f3e"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.230804 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://6f73ea9db4bc154e679740cf30c147a3e0cc18bfea0a3cba718640a8472b3f3e" gracePeriod=600 Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.335438 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/infra-operator-controller-manager-57548d458d-7w8xr" Dec 01 20:20:20 crc kubenswrapper[4852]: I1201 20:20:20.603691 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446t9t22" Dec 01 20:20:21 crc kubenswrapper[4852]: I1201 20:20:21.114830 4852 generic.go:334] "Generic (PLEG): container finished" podID="9127a421-1aec-41a6-a358-2744b581eab3" containerID="b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9" exitCode=0 Dec 01 20:20:21 crc kubenswrapper[4852]: I1201 20:20:21.114948 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqbpg" event={"ID":"9127a421-1aec-41a6-a358-2744b581eab3","Type":"ContainerDied","Data":"b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9"} Dec 01 20:20:22 crc kubenswrapper[4852]: I1201 20:20:22.124257 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="6f73ea9db4bc154e679740cf30c147a3e0cc18bfea0a3cba718640a8472b3f3e" exitCode=0 Dec 01 20:20:22 crc kubenswrapper[4852]: I1201 20:20:22.124371 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"6f73ea9db4bc154e679740cf30c147a3e0cc18bfea0a3cba718640a8472b3f3e"} Dec 01 20:20:22 crc kubenswrapper[4852]: I1201 20:20:22.124849 4852 scope.go:117] "RemoveContainer" containerID="e8d8bb3a8de07d2b074b29dff0be56557f4e08cb6f204e86ee817eb0ea29ff0d" Dec 01 20:20:23 crc kubenswrapper[4852]: I1201 20:20:23.235854 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:20:23 crc kubenswrapper[4852]: I1201 20:20:23.236299 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:20:23 crc kubenswrapper[4852]: I1201 20:20:23.296644 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:20:23 crc kubenswrapper[4852]: I1201 20:20:23.683348 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lzq9l" Dec 01 20:20:23 crc kubenswrapper[4852]: I1201 20:20:23.683926 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lzq9l" Dec 01 20:20:23 crc kubenswrapper[4852]: I1201 20:20:23.731399 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lzq9l" Dec 01 20:20:24 crc kubenswrapper[4852]: I1201 20:20:24.187233 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:20:24 crc kubenswrapper[4852]: I1201 20:20:24.197784 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lzq9l" Dec 01 20:20:24 crc kubenswrapper[4852]: I1201 20:20:24.753275 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rpswz" Dec 01 20:20:24 crc kubenswrapper[4852]: I1201 20:20:24.753729 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/nova-operator-controller-manager-697bc559fc-5mktq" Dec 01 20:20:24 crc kubenswrapper[4852]: I1201 20:20:24.754001 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-lz6m7" Dec 01 20:20:24 crc kubenswrapper[4852]: I1201 20:20:24.784345 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tdw8l" Dec 01 20:20:24 crc kubenswrapper[4852]: I1201 20:20:24.810500 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2phng" Dec 01 20:20:25 crc kubenswrapper[4852]: I1201 20:20:25.158093 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-rqqkl" Dec 01 20:20:25 crc kubenswrapper[4852]: I1201 20:20:25.243503 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lslxr" Dec 01 20:20:25 crc kubenswrapper[4852]: I1201 20:20:25.459497 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-95kl4"] Dec 01 20:20:26 crc kubenswrapper[4852]: I1201 20:20:26.063894 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lzq9l"] Dec 01 20:20:26 crc kubenswrapper[4852]: I1201 20:20:26.158152 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-95kl4" podUID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerName="registry-server" containerID="cri-o://2284a0a6685420f29bbbb4be42f5dda16303ebdff5fc733340160a4e64401fdd" gracePeriod=2 Dec 01 20:20:27 crc kubenswrapper[4852]: I1201 20:20:27.164588 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lzq9l" podUID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerName="registry-server" containerID="cri-o://a749ca79f189c363c0fb7b0ebe99d904c2b9bca3cb6e4446abd01ece6594e326" gracePeriod=2 Dec 01 20:20:27 crc kubenswrapper[4852]: I1201 20:20:27.429793 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-qvhk9" Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.317900 4852 generic.go:334] "Generic (PLEG): container finished" podID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerID="2284a0a6685420f29bbbb4be42f5dda16303ebdff5fc733340160a4e64401fdd" exitCode=0 Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.318074 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-95kl4" event={"ID":"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e","Type":"ContainerDied","Data":"2284a0a6685420f29bbbb4be42f5dda16303ebdff5fc733340160a4e64401fdd"} Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.335108 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.398291 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-catalog-content\") pod \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.398362 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-utilities\") pod \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.398654 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf4cx\" (UniqueName: \"kubernetes.io/projected/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-kube-api-access-jf4cx\") pod \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\" (UID: \"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e\") " Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.399597 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-utilities" (OuterVolumeSpecName: "utilities") pod "7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" (UID: "7a2be4e3-0283-4fdd-ae8e-0d23505cf94e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.406669 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-kube-api-access-jf4cx" (OuterVolumeSpecName: "kube-api-access-jf4cx") pod "7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" (UID: "7a2be4e3-0283-4fdd-ae8e-0d23505cf94e"). InnerVolumeSpecName "kube-api-access-jf4cx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.458229 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" (UID: "7a2be4e3-0283-4fdd-ae8e-0d23505cf94e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.501310 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf4cx\" (UniqueName: \"kubernetes.io/projected/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-kube-api-access-jf4cx\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.501362 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:31 crc kubenswrapper[4852]: I1201 20:20:31.501377 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.351667 4852 generic.go:334] "Generic (PLEG): container finished" podID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerID="a749ca79f189c363c0fb7b0ebe99d904c2b9bca3cb6e4446abd01ece6594e326" exitCode=0 Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.351747 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lzq9l" event={"ID":"ca3401d4-95a5-47a0-bedf-56036f56a819","Type":"ContainerDied","Data":"a749ca79f189c363c0fb7b0ebe99d904c2b9bca3cb6e4446abd01ece6594e326"} Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.360752 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"4f3ce873c2afbf23e359eb217337f90c2f601b26a34a306541975911addf4c32"} Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.371355 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-95kl4" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.371388 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-95kl4" event={"ID":"7a2be4e3-0283-4fdd-ae8e-0d23505cf94e","Type":"ContainerDied","Data":"94946c691344f284229023d9d1ceb23d9bb74e4fe3aa80bf2c254838fa86e181"} Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.371570 4852 scope.go:117] "RemoveContainer" containerID="2284a0a6685420f29bbbb4be42f5dda16303ebdff5fc733340160a4e64401fdd" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.392624 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqbpg" event={"ID":"9127a421-1aec-41a6-a358-2744b581eab3","Type":"ContainerDied","Data":"98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30"} Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.392442 4852 generic.go:334] "Generic (PLEG): container finished" podID="9127a421-1aec-41a6-a358-2744b581eab3" containerID="98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30" exitCode=0 Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.416788 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-95kl4"] Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.424207 4852 scope.go:117] "RemoveContainer" containerID="1f9b8b4b8aae1471c3fd8070e43184080720c3bb07594a00c345aa2362731234" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.428670 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-95kl4"] Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.513039 4852 scope.go:117] "RemoveContainer" containerID="f146db9e5cd23e597c7a93f9b07f36ab91ee8ed15ba734cbfffcffb893541283" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.531173 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lzq9l" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.623739 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-catalog-content\") pod \"ca3401d4-95a5-47a0-bedf-56036f56a819\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.624305 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jtmh\" (UniqueName: \"kubernetes.io/projected/ca3401d4-95a5-47a0-bedf-56036f56a819-kube-api-access-8jtmh\") pod \"ca3401d4-95a5-47a0-bedf-56036f56a819\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.624505 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-utilities\") pod \"ca3401d4-95a5-47a0-bedf-56036f56a819\" (UID: \"ca3401d4-95a5-47a0-bedf-56036f56a819\") " Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.625767 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-utilities" (OuterVolumeSpecName: "utilities") pod "ca3401d4-95a5-47a0-bedf-56036f56a819" (UID: "ca3401d4-95a5-47a0-bedf-56036f56a819"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.630945 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca3401d4-95a5-47a0-bedf-56036f56a819-kube-api-access-8jtmh" (OuterVolumeSpecName: "kube-api-access-8jtmh") pod "ca3401d4-95a5-47a0-bedf-56036f56a819" (UID: "ca3401d4-95a5-47a0-bedf-56036f56a819"). InnerVolumeSpecName "kube-api-access-8jtmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.679056 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ca3401d4-95a5-47a0-bedf-56036f56a819" (UID: "ca3401d4-95a5-47a0-bedf-56036f56a819"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.725840 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.725887 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jtmh\" (UniqueName: \"kubernetes.io/projected/ca3401d4-95a5-47a0-bedf-56036f56a819-kube-api-access-8jtmh\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:32 crc kubenswrapper[4852]: I1201 20:20:32.725912 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca3401d4-95a5-47a0-bedf-56036f56a819-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:33 crc kubenswrapper[4852]: I1201 20:20:33.410109 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqbpg" event={"ID":"9127a421-1aec-41a6-a358-2744b581eab3","Type":"ContainerStarted","Data":"c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3"} Dec 01 20:20:33 crc kubenswrapper[4852]: I1201 20:20:33.413134 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lzq9l" Dec 01 20:20:33 crc kubenswrapper[4852]: I1201 20:20:33.413130 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lzq9l" event={"ID":"ca3401d4-95a5-47a0-bedf-56036f56a819","Type":"ContainerDied","Data":"91cab9106ed1c2622473698efe0c88fef36b3c056002641bf5dc650c56a23fd0"} Dec 01 20:20:33 crc kubenswrapper[4852]: I1201 20:20:33.413222 4852 scope.go:117] "RemoveContainer" containerID="a749ca79f189c363c0fb7b0ebe99d904c2b9bca3cb6e4446abd01ece6594e326" Dec 01 20:20:33 crc kubenswrapper[4852]: I1201 20:20:33.432534 4852 scope.go:117] "RemoveContainer" containerID="1859f651d474e168d33cc2e38d7085e1d63afb4e49bf70bf06c4f0e3b41af9d4" Dec 01 20:20:33 crc kubenswrapper[4852]: I1201 20:20:33.446409 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pqbpg" podStartSLOduration=3.493840981 podStartE2EDuration="15.446377424s" podCreationTimestamp="2025-12-01 20:20:18 +0000 UTC" firstStartedPulling="2025-12-01 20:20:21.116658322 +0000 UTC m=+941.043739739" lastFinishedPulling="2025-12-01 20:20:33.069194765 +0000 UTC m=+952.996276182" observedRunningTime="2025-12-01 20:20:33.43864904 +0000 UTC m=+953.365730467" watchObservedRunningTime="2025-12-01 20:20:33.446377424 +0000 UTC m=+953.373458861" Dec 01 20:20:33 crc kubenswrapper[4852]: I1201 20:20:33.462805 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lzq9l"] Dec 01 20:20:33 crc kubenswrapper[4852]: I1201 20:20:33.470167 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lzq9l"] Dec 01 20:20:33 crc kubenswrapper[4852]: I1201 20:20:33.485184 4852 scope.go:117] "RemoveContainer" containerID="31850711fc9d30f646c46bdf315c24a2aea4af0f00686dc98962694261724e27" Dec 01 20:20:34 crc kubenswrapper[4852]: I1201 20:20:34.335858 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" path="/var/lib/kubelet/pods/7a2be4e3-0283-4fdd-ae8e-0d23505cf94e/volumes" Dec 01 20:20:34 crc kubenswrapper[4852]: I1201 20:20:34.336732 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca3401d4-95a5-47a0-bedf-56036f56a819" path="/var/lib/kubelet/pods/ca3401d4-95a5-47a0-bedf-56036f56a819/volumes" Dec 01 20:20:38 crc kubenswrapper[4852]: I1201 20:20:38.676108 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:38 crc kubenswrapper[4852]: I1201 20:20:38.677008 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:38 crc kubenswrapper[4852]: I1201 20:20:38.758095 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:39 crc kubenswrapper[4852]: I1201 20:20:39.532055 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:39 crc kubenswrapper[4852]: I1201 20:20:39.863521 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqbpg"] Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.479959 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pqbpg" 
podUID="9127a421-1aec-41a6-a358-2744b581eab3" containerName="registry-server" containerID="cri-o://c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3" gracePeriod=2 Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.917990 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-557f57d995-mnfbx"] Dec 01 20:20:41 crc kubenswrapper[4852]: E1201 20:20:41.919014 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerName="registry-server" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.919068 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerName="registry-server" Dec 01 20:20:41 crc kubenswrapper[4852]: E1201 20:20:41.919091 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerName="extract-utilities" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.919100 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerName="extract-utilities" Dec 01 20:20:41 crc kubenswrapper[4852]: E1201 20:20:41.919172 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerName="registry-server" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.919181 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerName="registry-server" Dec 01 20:20:41 crc kubenswrapper[4852]: E1201 20:20:41.919219 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerName="extract-utilities" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.919227 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerName="extract-utilities" Dec 01 20:20:41 crc kubenswrapper[4852]: E1201 20:20:41.919237 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerName="extract-content" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.919246 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerName="extract-content" Dec 01 20:20:41 crc kubenswrapper[4852]: E1201 20:20:41.919258 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerName="extract-content" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.919264 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerName="extract-content" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.919478 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca3401d4-95a5-47a0-bedf-56036f56a819" containerName="registry-server" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.919502 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a2be4e3-0283-4fdd-ae8e-0d23505cf94e" containerName="registry-server" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.922669 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-mnfbx" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.926551 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.926894 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.927065 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.927359 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-wl92g" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.932597 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-mnfbx"] Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.984364 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-jbgfb"] Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.986075 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.988672 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-px7j6\" (UniqueName: \"kubernetes.io/projected/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-kube-api-access-px7j6\") pod \"dnsmasq-dns-557f57d995-mnfbx\" (UID: \"0fc788be-6cf7-477e-8c77-ca11daf3bcc5\") " pod="openstack/dnsmasq-dns-557f57d995-mnfbx" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.988875 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-config\") pod \"dnsmasq-dns-557f57d995-mnfbx\" (UID: \"0fc788be-6cf7-477e-8c77-ca11daf3bcc5\") " pod="openstack/dnsmasq-dns-557f57d995-mnfbx" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.992590 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 01 20:20:41 crc kubenswrapper[4852]: I1201 20:20:41.999159 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-jbgfb"] Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.090623 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-config\") pod \"dnsmasq-dns-766fdc659c-jbgfb\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.090702 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-px7j6\" (UniqueName: \"kubernetes.io/projected/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-kube-api-access-px7j6\") pod \"dnsmasq-dns-557f57d995-mnfbx\" (UID: \"0fc788be-6cf7-477e-8c77-ca11daf3bcc5\") " pod="openstack/dnsmasq-dns-557f57d995-mnfbx" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.090792 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-config\") pod \"dnsmasq-dns-557f57d995-mnfbx\" (UID: \"0fc788be-6cf7-477e-8c77-ca11daf3bcc5\") " pod="openstack/dnsmasq-dns-557f57d995-mnfbx" Dec 01 20:20:42 
crc kubenswrapper[4852]: I1201 20:20:42.090819 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48779\" (UniqueName: \"kubernetes.io/projected/3600b37a-414e-4543-b290-2c47ca950f8e-kube-api-access-48779\") pod \"dnsmasq-dns-766fdc659c-jbgfb\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.090869 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-dns-svc\") pod \"dnsmasq-dns-766fdc659c-jbgfb\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.092005 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-config\") pod \"dnsmasq-dns-557f57d995-mnfbx\" (UID: \"0fc788be-6cf7-477e-8c77-ca11daf3bcc5\") " pod="openstack/dnsmasq-dns-557f57d995-mnfbx" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.111399 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-px7j6\" (UniqueName: \"kubernetes.io/projected/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-kube-api-access-px7j6\") pod \"dnsmasq-dns-557f57d995-mnfbx\" (UID: \"0fc788be-6cf7-477e-8c77-ca11daf3bcc5\") " pod="openstack/dnsmasq-dns-557f57d995-mnfbx" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.192877 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48779\" (UniqueName: \"kubernetes.io/projected/3600b37a-414e-4543-b290-2c47ca950f8e-kube-api-access-48779\") pod \"dnsmasq-dns-766fdc659c-jbgfb\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.192981 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-dns-svc\") pod \"dnsmasq-dns-766fdc659c-jbgfb\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.193018 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-config\") pod \"dnsmasq-dns-766fdc659c-jbgfb\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.193988 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-config\") pod \"dnsmasq-dns-766fdc659c-jbgfb\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.194586 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-dns-svc\") pod \"dnsmasq-dns-766fdc659c-jbgfb\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.217681 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-48779\" (UniqueName: \"kubernetes.io/projected/3600b37a-414e-4543-b290-2c47ca950f8e-kube-api-access-48779\") pod \"dnsmasq-dns-766fdc659c-jbgfb\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.250386 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-mnfbx" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.310398 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.365233 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.398713 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-catalog-content\") pod \"9127a421-1aec-41a6-a358-2744b581eab3\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.398773 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-utilities\") pod \"9127a421-1aec-41a6-a358-2744b581eab3\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.400597 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-utilities" (OuterVolumeSpecName: "utilities") pod "9127a421-1aec-41a6-a358-2744b581eab3" (UID: "9127a421-1aec-41a6-a358-2744b581eab3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.423883 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9127a421-1aec-41a6-a358-2744b581eab3" (UID: "9127a421-1aec-41a6-a358-2744b581eab3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.489534 4852 generic.go:334] "Generic (PLEG): container finished" podID="9127a421-1aec-41a6-a358-2744b581eab3" containerID="c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3" exitCode=0 Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.489597 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqbpg" event={"ID":"9127a421-1aec-41a6-a358-2744b581eab3","Type":"ContainerDied","Data":"c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3"} Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.489636 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqbpg" event={"ID":"9127a421-1aec-41a6-a358-2744b581eab3","Type":"ContainerDied","Data":"93f096ec785d4f5218185b1507ded01ad4a30164ecfe110ca7da49763a967e19"} Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.489661 4852 scope.go:117] "RemoveContainer" containerID="c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.489828 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqbpg" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.499805 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ssvx\" (UniqueName: \"kubernetes.io/projected/9127a421-1aec-41a6-a358-2744b581eab3-kube-api-access-8ssvx\") pod \"9127a421-1aec-41a6-a358-2744b581eab3\" (UID: \"9127a421-1aec-41a6-a358-2744b581eab3\") " Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.500218 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.500233 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9127a421-1aec-41a6-a358-2744b581eab3-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.508506 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9127a421-1aec-41a6-a358-2744b581eab3-kube-api-access-8ssvx" (OuterVolumeSpecName: "kube-api-access-8ssvx") pod "9127a421-1aec-41a6-a358-2744b581eab3" (UID: "9127a421-1aec-41a6-a358-2744b581eab3"). InnerVolumeSpecName "kube-api-access-8ssvx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.520899 4852 scope.go:117] "RemoveContainer" containerID="98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.540685 4852 scope.go:117] "RemoveContainer" containerID="b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.568048 4852 scope.go:117] "RemoveContainer" containerID="c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3" Dec 01 20:20:42 crc kubenswrapper[4852]: E1201 20:20:42.568604 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3\": container with ID starting with c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3 not found: ID does not exist" containerID="c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.568672 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3"} err="failed to get container status \"c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3\": rpc error: code = NotFound desc = could not find container \"c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3\": container with ID starting with c85e7a6a7683fc5bff2531657414cfa8eb5eceaab2461e690ab56f7aa60f0ab3 not found: ID does not exist" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.568722 4852 scope.go:117] "RemoveContainer" containerID="98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30" Dec 01 20:20:42 crc kubenswrapper[4852]: E1201 20:20:42.569095 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30\": container with ID starting with 98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30 not found: ID does not exist" containerID="98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.569114 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30"} err="failed to get container status \"98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30\": rpc error: code = NotFound desc = could not find container \"98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30\": container with ID starting with 98c0fca8d12b54b10486f65a7645714323840836d5b6ee3fb4dbc67189810f30 not found: ID does not exist" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.569127 4852 scope.go:117] "RemoveContainer" containerID="b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9" Dec 01 20:20:42 crc kubenswrapper[4852]: E1201 20:20:42.569341 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9\": container with ID starting with b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9 not found: ID does not exist" containerID="b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9" Dec 01 20:20:42 crc 
kubenswrapper[4852]: I1201 20:20:42.569363 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9"} err="failed to get container status \"b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9\": rpc error: code = NotFound desc = could not find container \"b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9\": container with ID starting with b1a2871a035446867a77e03bed38f64d9a928ca11fda7c9773ef403fb1cd03b9 not found: ID does not exist" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.584309 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-jbgfb"] Dec 01 20:20:42 crc kubenswrapper[4852]: W1201 20:20:42.588571 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3600b37a_414e_4543_b290_2c47ca950f8e.slice/crio-c614142e341ce24f530f894c294f53ed2d08199992121b9c925fbd1b8671dfc1 WatchSource:0}: Error finding container c614142e341ce24f530f894c294f53ed2d08199992121b9c925fbd1b8671dfc1: Status 404 returned error can't find the container with id c614142e341ce24f530f894c294f53ed2d08199992121b9c925fbd1b8671dfc1 Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.593416 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.602773 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ssvx\" (UniqueName: \"kubernetes.io/projected/9127a421-1aec-41a6-a358-2744b581eab3-kube-api-access-8ssvx\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.739272 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-mnfbx"] Dec 01 20:20:42 crc kubenswrapper[4852]: W1201 20:20:42.744638 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fc788be_6cf7_477e_8c77_ca11daf3bcc5.slice/crio-c79f25c20f01e9fa050021db04d8ee4f9fa34673d71eb5e0f6f95e155540e852 WatchSource:0}: Error finding container c79f25c20f01e9fa050021db04d8ee4f9fa34673d71eb5e0f6f95e155540e852: Status 404 returned error can't find the container with id c79f25c20f01e9fa050021db04d8ee4f9fa34673d71eb5e0f6f95e155540e852 Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.827797 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqbpg"] Dec 01 20:20:42 crc kubenswrapper[4852]: I1201 20:20:42.834474 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqbpg"] Dec 01 20:20:43 crc kubenswrapper[4852]: I1201 20:20:43.503712 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" event={"ID":"3600b37a-414e-4543-b290-2c47ca950f8e","Type":"ContainerStarted","Data":"c614142e341ce24f530f894c294f53ed2d08199992121b9c925fbd1b8671dfc1"} Dec 01 20:20:43 crc kubenswrapper[4852]: I1201 20:20:43.505341 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557f57d995-mnfbx" event={"ID":"0fc788be-6cf7-477e-8c77-ca11daf3bcc5","Type":"ContainerStarted","Data":"c79f25c20f01e9fa050021db04d8ee4f9fa34673d71eb5e0f6f95e155540e852"} Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.338245 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
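[editor's note] The NotFound errors above are benign: by the time the kubelet asks the runtime for the status of a container it is deleting, CRI-O has already removed it, so the kubelet logs the error and moves on, treating "already gone" as success. A sketch of that idempotent-removal pattern; errNotFound stands in for the gRPC NotFound code seen in the log.

```go
package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for the runtime's gRPC NotFound code; the point
// is that removal is idempotent, so "already gone" is not retried.
var errNotFound = errors.New("container not found")

func removeContainer(store map[string]bool, id string) error {
	if !store[id] {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	delete(store, id)
	return nil
}

func main() {
	store := map[string]bool{} // the container was already removed
	err := removeContainer(store, "c85e7a6a7683")
	if errors.Is(err, errNotFound) {
		// Mirror the kubelet's behaviour: log and continue.
		fmt.Println("DeleteContainer returned error (ignored):", err)
	}
}
```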
podUID="9127a421-1aec-41a6-a358-2744b581eab3" path="/var/lib/kubelet/pods/9127a421-1aec-41a6-a358-2744b581eab3/volumes" Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.709549 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-mnfbx"] Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.744492 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-6f9ml"] Dec 01 20:20:44 crc kubenswrapper[4852]: E1201 20:20:44.744955 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9127a421-1aec-41a6-a358-2744b581eab3" containerName="extract-content" Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.744982 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="9127a421-1aec-41a6-a358-2744b581eab3" containerName="extract-content" Dec 01 20:20:44 crc kubenswrapper[4852]: E1201 20:20:44.744997 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9127a421-1aec-41a6-a358-2744b581eab3" containerName="extract-utilities" Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.745004 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="9127a421-1aec-41a6-a358-2744b581eab3" containerName="extract-utilities" Dec 01 20:20:44 crc kubenswrapper[4852]: E1201 20:20:44.745019 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9127a421-1aec-41a6-a358-2744b581eab3" containerName="registry-server" Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.745025 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="9127a421-1aec-41a6-a358-2744b581eab3" containerName="registry-server" Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.745200 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="9127a421-1aec-41a6-a358-2744b581eab3" containerName="registry-server" Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.746210 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.755743 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-6f9ml"] Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.945801 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-config\") pod \"dnsmasq-dns-5cd665b7c7-6f9ml\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.945907 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-dns-svc\") pod \"dnsmasq-dns-5cd665b7c7-6f9ml\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:44 crc kubenswrapper[4852]: I1201 20:20:44.945995 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55wxm\" (UniqueName: \"kubernetes.io/projected/49a9a990-85b6-442f-aaa3-43f38451ee93-kube-api-access-55wxm\") pod \"dnsmasq-dns-5cd665b7c7-6f9ml\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.048530 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-config\") pod \"dnsmasq-dns-5cd665b7c7-6f9ml\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.048615 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-dns-svc\") pod \"dnsmasq-dns-5cd665b7c7-6f9ml\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.048686 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55wxm\" (UniqueName: \"kubernetes.io/projected/49a9a990-85b6-442f-aaa3-43f38451ee93-kube-api-access-55wxm\") pod \"dnsmasq-dns-5cd665b7c7-6f9ml\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.050375 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-config\") pod \"dnsmasq-dns-5cd665b7c7-6f9ml\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.051073 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-dns-svc\") pod \"dnsmasq-dns-5cd665b7c7-6f9ml\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.058162 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-jbgfb"] Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.085004 
4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55wxm\" (UniqueName: \"kubernetes.io/projected/49a9a990-85b6-442f-aaa3-43f38451ee93-kube-api-access-55wxm\") pod \"dnsmasq-dns-5cd665b7c7-6f9ml\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.102564 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-8l6rw"] Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.110936 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.126426 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-8l6rw"] Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.253640 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-8l6rw\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.253723 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7kvk\" (UniqueName: \"kubernetes.io/projected/3741c48a-18da-4405-980e-954b30ea35a4-kube-api-access-q7kvk\") pod \"dnsmasq-dns-8446fd7c75-8l6rw\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.253779 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-config\") pod \"dnsmasq-dns-8446fd7c75-8l6rw\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.355527 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7kvk\" (UniqueName: \"kubernetes.io/projected/3741c48a-18da-4405-980e-954b30ea35a4-kube-api-access-q7kvk\") pod \"dnsmasq-dns-8446fd7c75-8l6rw\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.355597 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-config\") pod \"dnsmasq-dns-8446fd7c75-8l6rw\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.355700 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-8l6rw\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.356622 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-8l6rw\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " 
pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.356850 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-config\") pod \"dnsmasq-dns-8446fd7c75-8l6rw\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.374601 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.391119 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7kvk\" (UniqueName: \"kubernetes.io/projected/3741c48a-18da-4405-980e-954b30ea35a4-kube-api-access-q7kvk\") pod \"dnsmasq-dns-8446fd7c75-8l6rw\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.448918 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.721920 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-6f9ml"] Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.911217 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.925796 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.928550 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.928912 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-shsg6" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.930288 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.930553 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.930656 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.930791 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.930895 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 01 20:20:45 crc kubenswrapper[4852]: I1201 20:20:45.931023 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068127 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068209 4852 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068270 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068320 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068347 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068371 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068400 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068474 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068505 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068536 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwqsc\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-kube-api-access-bwqsc\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.068557 
4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: W1201 20:20:46.068797 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3741c48a_18da_4405_980e_954b30ea35a4.slice/crio-f38164978fd7fb3249a52029c3adcb8c03267c019e8d9422400d6754bae5842e WatchSource:0}: Error finding container f38164978fd7fb3249a52029c3adcb8c03267c019e8d9422400d6754bae5842e: Status 404 returned error can't find the container with id f38164978fd7fb3249a52029c3adcb8c03267c019e8d9422400d6754bae5842e Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.070196 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-8l6rw"] Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.170344 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.170852 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.170879 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.170910 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.170959 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.170984 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.171015 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwqsc\" (UniqueName: 
\"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-kube-api-access-bwqsc\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.171045 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.171086 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.171113 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.171136 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.171203 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.171617 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.173406 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.174022 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.175544 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.178832 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.179404 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.179606 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.181157 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.186347 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.201452 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwqsc\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-kube-api-access-bwqsc\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.207624 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.233858 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.235525 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.241235 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.247445 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.247779 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.250727 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.252106 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jd5km" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.252276 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.252425 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.254358 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.263505 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376385 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376475 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff0aa0ab-3c85-4b10-a2c4-a680086db344-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376531 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm5ff\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-kube-api-access-dm5ff\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376574 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376624 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376645 4852 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376669 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376711 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376736 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-config-data\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376765 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff0aa0ab-3c85-4b10-a2c4-a680086db344-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.376801 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478443 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff0aa0ab-3c85-4b10-a2c4-a680086db344-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478532 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm5ff\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-kube-api-access-dm5ff\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478564 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478603 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478636 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478663 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478713 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478735 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-config-data\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478756 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff0aa0ab-3c85-4b10-a2c4-a680086db344-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478794 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.478867 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.479419 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.479453 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 
20:20:46.480117 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.481533 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.485925 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-config-data\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.486884 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.489848 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.490646 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff0aa0ab-3c85-4b10-a2c4-a680086db344-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.491486 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.500249 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff0aa0ab-3c85-4b10-a2c4-a680086db344-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.500838 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm5ff\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-kube-api-access-dm5ff\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.511929 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") " pod="openstack/rabbitmq-server-0" Dec 01 20:20:46 
crc kubenswrapper[4852]: I1201 20:20:46.571001 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" event={"ID":"49a9a990-85b6-442f-aaa3-43f38451ee93","Type":"ContainerStarted","Data":"951d7b623f82db3766e57b63c50f1884334bd39e774aaf5899a7e3b1507ded57"} Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.573050 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" event={"ID":"3741c48a-18da-4405-980e-954b30ea35a4","Type":"ContainerStarted","Data":"f38164978fd7fb3249a52029c3adcb8c03267c019e8d9422400d6754bae5842e"} Dec 01 20:20:46 crc kubenswrapper[4852]: I1201 20:20:46.587734 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 20:20:47 crc kubenswrapper[4852]: I1201 20:20:47.840166 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 01 20:20:47 crc kubenswrapper[4852]: I1201 20:20:47.843703 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 01 20:20:47 crc kubenswrapper[4852]: I1201 20:20:47.848810 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-nd6g2" Dec 01 20:20:47 crc kubenswrapper[4852]: I1201 20:20:47.851906 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 01 20:20:47 crc kubenswrapper[4852]: I1201 20:20:47.852538 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 01 20:20:47 crc kubenswrapper[4852]: I1201 20:20:47.852937 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 01 20:20:47 crc kubenswrapper[4852]: I1201 20:20:47.855292 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 01 20:20:47 crc kubenswrapper[4852]: I1201 20:20:47.856565 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.035703 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcgdl\" (UniqueName: \"kubernetes.io/projected/5a512edf-0808-47a9-91dd-81da3cf1cda9-kube-api-access-qcgdl\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.035803 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a512edf-0808-47a9-91dd-81da3cf1cda9-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.035830 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5a512edf-0808-47a9-91dd-81da3cf1cda9-kolla-config\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.036020 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/5a512edf-0808-47a9-91dd-81da3cf1cda9-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.036081 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.036113 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a512edf-0808-47a9-91dd-81da3cf1cda9-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.036188 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5a512edf-0808-47a9-91dd-81da3cf1cda9-config-data-default\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.036215 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a512edf-0808-47a9-91dd-81da3cf1cda9-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.138443 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5a512edf-0808-47a9-91dd-81da3cf1cda9-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.138525 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.138567 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a512edf-0808-47a9-91dd-81da3cf1cda9-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.138626 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5a512edf-0808-47a9-91dd-81da3cf1cda9-config-data-default\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.138656 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a512edf-0808-47a9-91dd-81da3cf1cda9-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: 
\"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.138683 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcgdl\" (UniqueName: \"kubernetes.io/projected/5a512edf-0808-47a9-91dd-81da3cf1cda9-kube-api-access-qcgdl\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.138726 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a512edf-0808-47a9-91dd-81da3cf1cda9-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.138745 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5a512edf-0808-47a9-91dd-81da3cf1cda9-kolla-config\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.139048 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.139104 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5a512edf-0808-47a9-91dd-81da3cf1cda9-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.139676 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5a512edf-0808-47a9-91dd-81da3cf1cda9-kolla-config\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.140549 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5a512edf-0808-47a9-91dd-81da3cf1cda9-config-data-default\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.141096 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a512edf-0808-47a9-91dd-81da3cf1cda9-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.151281 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a512edf-0808-47a9-91dd-81da3cf1cda9-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.159874 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a512edf-0808-47a9-91dd-81da3cf1cda9-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.177857 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.180584 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcgdl\" (UniqueName: \"kubernetes.io/projected/5a512edf-0808-47a9-91dd-81da3cf1cda9-kube-api-access-qcgdl\") pod \"openstack-galera-0\" (UID: \"5a512edf-0808-47a9-91dd-81da3cf1cda9\") " pod="openstack/openstack-galera-0" Dec 01 20:20:48 crc kubenswrapper[4852]: I1201 20:20:48.479825 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.351183 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.354162 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.358605 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.359524 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.360468 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-6ds5d" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.364302 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.370973 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.468036 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/775ec07e-8dd8-47f7-94f1-4c5355335a82-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.468109 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.468531 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg5k6\" (UniqueName: \"kubernetes.io/projected/775ec07e-8dd8-47f7-94f1-4c5355335a82-kube-api-access-lg5k6\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 
20:20:49.468645 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/775ec07e-8dd8-47f7-94f1-4c5355335a82-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.468680 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/775ec07e-8dd8-47f7-94f1-4c5355335a82-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.468739 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/775ec07e-8dd8-47f7-94f1-4c5355335a82-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.468796 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/775ec07e-8dd8-47f7-94f1-4c5355335a82-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.468823 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/775ec07e-8dd8-47f7-94f1-4c5355335a82-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.571136 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/775ec07e-8dd8-47f7-94f1-4c5355335a82-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.571220 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/775ec07e-8dd8-47f7-94f1-4c5355335a82-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.571262 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/775ec07e-8dd8-47f7-94f1-4c5355335a82-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.571297 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 
20:20:49.571343 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg5k6\" (UniqueName: \"kubernetes.io/projected/775ec07e-8dd8-47f7-94f1-4c5355335a82-kube-api-access-lg5k6\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.571383 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/775ec07e-8dd8-47f7-94f1-4c5355335a82-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.571435 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/775ec07e-8dd8-47f7-94f1-4c5355335a82-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.571489 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/775ec07e-8dd8-47f7-94f1-4c5355335a82-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.572443 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/775ec07e-8dd8-47f7-94f1-4c5355335a82-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.572924 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.573608 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/775ec07e-8dd8-47f7-94f1-4c5355335a82-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.573648 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/775ec07e-8dd8-47f7-94f1-4c5355335a82-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.573806 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/775ec07e-8dd8-47f7-94f1-4c5355335a82-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.590927 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/775ec07e-8dd8-47f7-94f1-4c5355335a82-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.590759 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/775ec07e-8dd8-47f7-94f1-4c5355335a82-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.603245 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg5k6\" (UniqueName: \"kubernetes.io/projected/775ec07e-8dd8-47f7-94f1-4c5355335a82-kube-api-access-lg5k6\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.622097 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"775ec07e-8dd8-47f7-94f1-4c5355335a82\") " pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.661589 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.673646 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.677913 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.677941 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-4knbl" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.678044 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.678178 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.688995 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.775634 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5724232f-c6e6-4356-b4b2-a622191bedaf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.776352 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5724232f-c6e6-4356-b4b2-a622191bedaf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.776390 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5724232f-c6e6-4356-b4b2-a622191bedaf-kolla-config\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.776442 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8dkh\" (UniqueName: \"kubernetes.io/projected/5724232f-c6e6-4356-b4b2-a622191bedaf-kube-api-access-p8dkh\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.776482 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5724232f-c6e6-4356-b4b2-a622191bedaf-config-data\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.877690 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8dkh\" (UniqueName: \"kubernetes.io/projected/5724232f-c6e6-4356-b4b2-a622191bedaf-kube-api-access-p8dkh\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.877763 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5724232f-c6e6-4356-b4b2-a622191bedaf-config-data\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.877825 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5724232f-c6e6-4356-b4b2-a622191bedaf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.877892 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5724232f-c6e6-4356-b4b2-a622191bedaf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.877935 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" 
(UniqueName: \"kubernetes.io/configmap/5724232f-c6e6-4356-b4b2-a622191bedaf-kolla-config\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.878869 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5724232f-c6e6-4356-b4b2-a622191bedaf-kolla-config\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.879438 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5724232f-c6e6-4356-b4b2-a622191bedaf-config-data\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.884685 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5724232f-c6e6-4356-b4b2-a622191bedaf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.885852 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5724232f-c6e6-4356-b4b2-a622191bedaf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.900817 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8dkh\" (UniqueName: \"kubernetes.io/projected/5724232f-c6e6-4356-b4b2-a622191bedaf-kube-api-access-p8dkh\") pod \"memcached-0\" (UID: \"5724232f-c6e6-4356-b4b2-a622191bedaf\") " pod="openstack/memcached-0" Dec 01 20:20:49 crc kubenswrapper[4852]: I1201 20:20:49.996840 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 01 20:20:51 crc kubenswrapper[4852]: I1201 20:20:51.721117 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 20:20:51 crc kubenswrapper[4852]: I1201 20:20:51.723108 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 20:20:51 crc kubenswrapper[4852]: I1201 20:20:51.726492 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-w5gzv" Dec 01 20:20:51 crc kubenswrapper[4852]: I1201 20:20:51.737124 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 20:20:51 crc kubenswrapper[4852]: I1201 20:20:51.817480 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz2zz\" (UniqueName: \"kubernetes.io/projected/b634d10f-beff-4ef8-8602-8e4acb8a5a4b-kube-api-access-qz2zz\") pod \"kube-state-metrics-0\" (UID: \"b634d10f-beff-4ef8-8602-8e4acb8a5a4b\") " pod="openstack/kube-state-metrics-0" Dec 01 20:20:51 crc kubenswrapper[4852]: I1201 20:20:51.919776 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz2zz\" (UniqueName: \"kubernetes.io/projected/b634d10f-beff-4ef8-8602-8e4acb8a5a4b-kube-api-access-qz2zz\") pod \"kube-state-metrics-0\" (UID: \"b634d10f-beff-4ef8-8602-8e4acb8a5a4b\") " pod="openstack/kube-state-metrics-0" Dec 01 20:20:51 crc kubenswrapper[4852]: I1201 20:20:51.952878 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz2zz\" (UniqueName: \"kubernetes.io/projected/b634d10f-beff-4ef8-8602-8e4acb8a5a4b-kube-api-access-qz2zz\") pod \"kube-state-metrics-0\" (UID: \"b634d10f-beff-4ef8-8602-8e4acb8a5a4b\") " pod="openstack/kube-state-metrics-0" Dec 01 20:20:52 crc kubenswrapper[4852]: I1201 20:20:52.047522 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 20:20:54 crc kubenswrapper[4852]: I1201 20:20:54.942426 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8kwmk"] Dec 01 20:20:54 crc kubenswrapper[4852]: I1201 20:20:54.944162 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:54 crc kubenswrapper[4852]: I1201 20:20:54.947834 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 01 20:20:54 crc kubenswrapper[4852]: I1201 20:20:54.948028 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-7m9jd" Dec 01 20:20:54 crc kubenswrapper[4852]: I1201 20:20:54.948192 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 01 20:20:54 crc kubenswrapper[4852]: I1201 20:20:54.959965 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-rwcdk"] Dec 01 20:20:54 crc kubenswrapper[4852]: I1201 20:20:54.962806 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:54 crc kubenswrapper[4852]: I1201 20:20:54.972057 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8kwmk"] Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.004556 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-rwcdk"] Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005036 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-etc-ovs\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005185 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-var-run\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005215 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-var-lib\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005238 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-scripts\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005269 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-var-log\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005294 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ef29299-3043-4921-b77b-07416d89ed96-ovn-controller-tls-certs\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005334 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5ef29299-3043-4921-b77b-07416d89ed96-var-run\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005360 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5ef29299-3043-4921-b77b-07416d89ed96-var-log-ovn\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005387 4852 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8t7m9\" (UniqueName: \"kubernetes.io/projected/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-kube-api-access-8t7m9\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005411 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ef29299-3043-4921-b77b-07416d89ed96-scripts\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005472 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef29299-3043-4921-b77b-07416d89ed96-combined-ca-bundle\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005502 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ef29299-3043-4921-b77b-07416d89ed96-var-run-ovn\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.005527 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m77kj\" (UniqueName: \"kubernetes.io/projected/5ef29299-3043-4921-b77b-07416d89ed96-kube-api-access-m77kj\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.107718 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5ef29299-3043-4921-b77b-07416d89ed96-var-run\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.107779 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5ef29299-3043-4921-b77b-07416d89ed96-var-log-ovn\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.107808 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8t7m9\" (UniqueName: \"kubernetes.io/projected/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-kube-api-access-8t7m9\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.107840 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ef29299-3043-4921-b77b-07416d89ed96-scripts\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.107895 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef29299-3043-4921-b77b-07416d89ed96-combined-ca-bundle\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.107924 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ef29299-3043-4921-b77b-07416d89ed96-var-run-ovn\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.107954 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m77kj\" (UniqueName: \"kubernetes.io/projected/5ef29299-3043-4921-b77b-07416d89ed96-kube-api-access-m77kj\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.107993 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-etc-ovs\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.108030 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-var-run\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.108049 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-var-lib\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.108069 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-scripts\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.108097 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-var-log\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.108117 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ef29299-3043-4921-b77b-07416d89ed96-ovn-controller-tls-certs\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.108595 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5ef29299-3043-4921-b77b-07416d89ed96-var-log-ovn\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " 
pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.108789 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-var-run\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.108815 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-var-log\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.108831 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-var-lib\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.108928 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5ef29299-3043-4921-b77b-07416d89ed96-var-run\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.109284 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ef29299-3043-4921-b77b-07416d89ed96-var-run-ovn\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.112529 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-etc-ovs\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.114149 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ef29299-3043-4921-b77b-07416d89ed96-scripts\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.114879 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-scripts\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.120859 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef29299-3043-4921-b77b-07416d89ed96-combined-ca-bundle\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.122719 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ef29299-3043-4921-b77b-07416d89ed96-ovn-controller-tls-certs\") pod 
\"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.129497 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m77kj\" (UniqueName: \"kubernetes.io/projected/5ef29299-3043-4921-b77b-07416d89ed96-kube-api-access-m77kj\") pod \"ovn-controller-8kwmk\" (UID: \"5ef29299-3043-4921-b77b-07416d89ed96\") " pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.130164 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8t7m9\" (UniqueName: \"kubernetes.io/projected/d0b85f35-5e0a-45ed-a162-02c81ffbdedb-kube-api-access-8t7m9\") pod \"ovn-controller-ovs-rwcdk\" (UID: \"d0b85f35-5e0a-45ed-a162-02c81ffbdedb\") " pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.276924 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8kwmk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.307680 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.837285 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.840202 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.843236 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.843277 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.843533 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.844592 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-66kk9" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.844819 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 01 20:20:55 crc kubenswrapper[4852]: I1201 20:20:55.845204 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.023002 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.023123 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e569dc0-0de8-47cf-a1d3-1e649efde4af-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.023168 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/3e569dc0-0de8-47cf-a1d3-1e649efde4af-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.023235 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e569dc0-0de8-47cf-a1d3-1e649efde4af-config\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.023278 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e569dc0-0de8-47cf-a1d3-1e649efde4af-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.023316 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e569dc0-0de8-47cf-a1d3-1e649efde4af-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.023387 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e569dc0-0de8-47cf-a1d3-1e649efde4af-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.023411 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bbqh\" (UniqueName: \"kubernetes.io/projected/3e569dc0-0de8-47cf-a1d3-1e649efde4af-kube-api-access-9bbqh\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.126403 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e569dc0-0de8-47cf-a1d3-1e649efde4af-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.126532 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e569dc0-0de8-47cf-a1d3-1e649efde4af-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.126590 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e569dc0-0de8-47cf-a1d3-1e649efde4af-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.126617 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bbqh\" (UniqueName: \"kubernetes.io/projected/3e569dc0-0de8-47cf-a1d3-1e649efde4af-kube-api-access-9bbqh\") pod \"ovsdbserver-nb-0\" (UID: 
\"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.126653 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.126735 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e569dc0-0de8-47cf-a1d3-1e649efde4af-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.126782 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e569dc0-0de8-47cf-a1d3-1e649efde4af-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.126848 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e569dc0-0de8-47cf-a1d3-1e649efde4af-config\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.127471 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e569dc0-0de8-47cf-a1d3-1e649efde4af-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.127913 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.128260 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e569dc0-0de8-47cf-a1d3-1e649efde4af-config\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.131620 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e569dc0-0de8-47cf-a1d3-1e649efde4af-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.138127 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e569dc0-0de8-47cf-a1d3-1e649efde4af-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.142391 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e569dc0-0de8-47cf-a1d3-1e649efde4af-metrics-certs-tls-certs\") pod 
\"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.145637 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e569dc0-0de8-47cf-a1d3-1e649efde4af-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.153605 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.155112 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bbqh\" (UniqueName: \"kubernetes.io/projected/3e569dc0-0de8-47cf-a1d3-1e649efde4af-kube-api-access-9bbqh\") pod \"ovsdbserver-nb-0\" (UID: \"3e569dc0-0de8-47cf-a1d3-1e649efde4af\") " pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:56 crc kubenswrapper[4852]: I1201 20:20:56.165906 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.519795 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.526718 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.539068 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.539141 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-z2pvw" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.538976 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.539798 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.544746 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.693981 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.694048 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.694298 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b682z\" (UniqueName: 
\"kubernetes.io/projected/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-kube-api-access-b682z\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.694391 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.694444 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-config\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.694482 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.694559 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.694631 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.796399 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.796486 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.796546 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b682z\" (UniqueName: \"kubernetes.io/projected/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-kube-api-access-b682z\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.796598 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " 
pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.796632 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-config\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.796660 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.796696 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.796729 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.797258 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.797774 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.798200 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-config\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.798721 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.820135 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.840855 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " 
pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.844372 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b682z\" (UniqueName: \"kubernetes.io/projected/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-kube-api-access-b682z\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.845073 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff6ee0b-6797-494c-8166-88c5cc7cf3fe-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.858757 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe\") " pod="openstack/ovsdbserver-sb-0" Dec 01 20:20:58 crc kubenswrapper[4852]: I1201 20:20:58.860820 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 20:20:59 crc kubenswrapper[4852]: I1201 20:20:59.164435 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 20:21:00 crc kubenswrapper[4852]: E1201 20:21:00.772948 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627" Dec 01 20:21:00 crc kubenswrapper[4852]: E1201 20:21:00.773687 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-px7j6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-557f57d995-mnfbx_openstack(0fc788be-6cf7-477e-8c77-ca11daf3bcc5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:21:00 crc kubenswrapper[4852]: E1201 20:21:00.776534 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-557f57d995-mnfbx" podUID="0fc788be-6cf7-477e-8c77-ca11daf3bcc5" Dec 01 20:21:00 crc kubenswrapper[4852]: E1201 20:21:00.798041 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627" Dec 01 20:21:00 crc kubenswrapper[4852]: E1201 20:21:00.798427 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-48779,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-766fdc659c-jbgfb_openstack(3600b37a-414e-4543-b290-2c47ca950f8e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:21:00 crc kubenswrapper[4852]: E1201 20:21:00.805323 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" podUID="3600b37a-414e-4543-b290-2c47ca950f8e" Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.364982 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.411816 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 20:21:01 crc kubenswrapper[4852]: W1201 20:21:01.413306 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod775ec07e_8dd8_47f7_94f1_4c5355335a82.slice/crio-91a10b61adc261287f2f96fcbf811dfff7b4e88629051ac713d15544a17f7113 WatchSource:0}: Error finding container 91a10b61adc261287f2f96fcbf811dfff7b4e88629051ac713d15544a17f7113: Status 404 returned error can't find the container with id 91a10b61adc261287f2f96fcbf811dfff7b4e88629051ac713d15544a17f7113 Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.471747 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.713040 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"ff0aa0ab-3c85-4b10-a2c4-a680086db344","Type":"ContainerStarted","Data":"b38f38ef4ea0bcea6c9f8a61212ccaafcface71f3b6220fd509568b0b97e6745"} Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.714781 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"775ec07e-8dd8-47f7-94f1-4c5355335a82","Type":"ContainerStarted","Data":"91a10b61adc261287f2f96fcbf811dfff7b4e88629051ac713d15544a17f7113"} Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.716758 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5a512edf-0808-47a9-91dd-81da3cf1cda9","Type":"ContainerStarted","Data":"a1bfa2eb332fab4b17384e9b080efc221c271fad097f226aec1b4f04feed4534"} Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.721870 4852 generic.go:334] "Generic (PLEG): container finished" podID="49a9a990-85b6-442f-aaa3-43f38451ee93" containerID="1b8cb127179104119957457a3084ceb076970412542d856805977a819874d907" exitCode=0 Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.721945 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" event={"ID":"49a9a990-85b6-442f-aaa3-43f38451ee93","Type":"ContainerDied","Data":"1b8cb127179104119957457a3084ceb076970412542d856805977a819874d907"} Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.732025 4852 generic.go:334] "Generic (PLEG): container finished" podID="3741c48a-18da-4405-980e-954b30ea35a4" containerID="b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a" exitCode=0 Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.732193 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" event={"ID":"3741c48a-18da-4405-980e-954b30ea35a4","Type":"ContainerDied","Data":"b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a"} Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.736902 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca","Type":"ContainerStarted","Data":"0a5b1258fd18e8b0557aad2704a8c42b7b55d99eeee5a088173228f858651bac"} Dec 01 20:21:01 crc kubenswrapper[4852]: I1201 20:21:01.879662 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8kwmk"] Dec 01 20:21:01 crc kubenswrapper[4852]: W1201 20:21:01.901656 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ef29299_3043_4921_b77b_07416d89ed96.slice/crio-f635d81610b8003cf86353a3a214b2c250d184161d8e3cdc6fca57bb31c5c3c9 WatchSource:0}: Error finding container f635d81610b8003cf86353a3a214b2c250d184161d8e3cdc6fca57bb31c5c3c9: Status 404 returned error can't find the container with id f635d81610b8003cf86353a3a214b2c250d184161d8e3cdc6fca57bb31c5c3c9 Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.200821 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.231927 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 20:21:02 crc kubenswrapper[4852]: W1201 20:21:02.236246 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb634d10f_beff_4ef8_8602_8e4acb8a5a4b.slice/crio-b7bea182960a30bd09c1ea224e27201a75bf99abaeaef395419c2f63f3bffe7d WatchSource:0}: Error finding 
container b7bea182960a30bd09c1ea224e27201a75bf99abaeaef395419c2f63f3bffe7d: Status 404 returned error can't find the container with id b7bea182960a30bd09c1ea224e27201a75bf99abaeaef395419c2f63f3bffe7d Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.244424 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-mnfbx" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.245829 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.296216 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48779\" (UniqueName: \"kubernetes.io/projected/3600b37a-414e-4543-b290-2c47ca950f8e-kube-api-access-48779\") pod \"3600b37a-414e-4543-b290-2c47ca950f8e\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.296280 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-px7j6\" (UniqueName: \"kubernetes.io/projected/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-kube-api-access-px7j6\") pod \"0fc788be-6cf7-477e-8c77-ca11daf3bcc5\" (UID: \"0fc788be-6cf7-477e-8c77-ca11daf3bcc5\") " Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.296343 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-config\") pod \"3600b37a-414e-4543-b290-2c47ca950f8e\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.296477 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-config\") pod \"0fc788be-6cf7-477e-8c77-ca11daf3bcc5\" (UID: \"0fc788be-6cf7-477e-8c77-ca11daf3bcc5\") " Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.296549 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-dns-svc\") pod \"3600b37a-414e-4543-b290-2c47ca950f8e\" (UID: \"3600b37a-414e-4543-b290-2c47ca950f8e\") " Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.297725 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-config" (OuterVolumeSpecName: "config") pod "3600b37a-414e-4543-b290-2c47ca950f8e" (UID: "3600b37a-414e-4543-b290-2c47ca950f8e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.297785 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3600b37a-414e-4543-b290-2c47ca950f8e" (UID: "3600b37a-414e-4543-b290-2c47ca950f8e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.298098 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-config" (OuterVolumeSpecName: "config") pod "0fc788be-6cf7-477e-8c77-ca11daf3bcc5" (UID: "0fc788be-6cf7-477e-8c77-ca11daf3bcc5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.303108 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3600b37a-414e-4543-b290-2c47ca950f8e-kube-api-access-48779" (OuterVolumeSpecName: "kube-api-access-48779") pod "3600b37a-414e-4543-b290-2c47ca950f8e" (UID: "3600b37a-414e-4543-b290-2c47ca950f8e"). InnerVolumeSpecName "kube-api-access-48779". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.306730 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-kube-api-access-px7j6" (OuterVolumeSpecName: "kube-api-access-px7j6") pod "0fc788be-6cf7-477e-8c77-ca11daf3bcc5" (UID: "0fc788be-6cf7-477e-8c77-ca11daf3bcc5"). InnerVolumeSpecName "kube-api-access-px7j6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.343712 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.401749 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.401791 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48779\" (UniqueName: \"kubernetes.io/projected/3600b37a-414e-4543-b290-2c47ca950f8e-kube-api-access-48779\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.401805 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-px7j6\" (UniqueName: \"kubernetes.io/projected/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-kube-api-access-px7j6\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.401816 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3600b37a-414e-4543-b290-2c47ca950f8e-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.401827 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fc788be-6cf7-477e-8c77-ca11daf3bcc5-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.406103 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-rwcdk"] Dec 01 20:21:02 crc kubenswrapper[4852]: W1201 20:21:02.410171 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0b85f35_5e0a_45ed_a162_02c81ffbdedb.slice/crio-c12b3fb238beb9ac747826c28dd090b239a8e6441f1c4a3e809f941904f3d3d4 WatchSource:0}: Error finding container c12b3fb238beb9ac747826c28dd090b239a8e6441f1c4a3e809f941904f3d3d4: Status 404 returned error can't find the container with id c12b3fb238beb9ac747826c28dd090b239a8e6441f1c4a3e809f941904f3d3d4 Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.756400 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" event={"ID":"49a9a990-85b6-442f-aaa3-43f38451ee93","Type":"ContainerStarted","Data":"439ca40be44cb2d94fd186ad6eff20ff2a4ff91a378665c5055a2b040c9715f1"} Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.756520 4852 
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.756520 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml"
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.758230 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b634d10f-beff-4ef8-8602-8e4acb8a5a4b","Type":"ContainerStarted","Data":"b7bea182960a30bd09c1ea224e27201a75bf99abaeaef395419c2f63f3bffe7d"}
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.761470 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rwcdk" event={"ID":"d0b85f35-5e0a-45ed-a162-02c81ffbdedb","Type":"ContainerStarted","Data":"c12b3fb238beb9ac747826c28dd090b239a8e6441f1c4a3e809f941904f3d3d4"}
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.763478 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557f57d995-mnfbx" event={"ID":"0fc788be-6cf7-477e-8c77-ca11daf3bcc5","Type":"ContainerDied","Data":"c79f25c20f01e9fa050021db04d8ee4f9fa34673d71eb5e0f6f95e155540e852"}
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.763685 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-mnfbx"
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.765489 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8kwmk" event={"ID":"5ef29299-3043-4921-b77b-07416d89ed96","Type":"ContainerStarted","Data":"f635d81610b8003cf86353a3a214b2c250d184161d8e3cdc6fca57bb31c5c3c9"}
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.766775 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe","Type":"ContainerStarted","Data":"1c2c7e08a8d4dc003d2234de60b440e73c33af592ea74f00b530e3c3aa8b0c89"}
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.768298 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"5724232f-c6e6-4356-b4b2-a622191bedaf","Type":"ContainerStarted","Data":"cfd8a1c03d5f3c55eb4f53dd71a620356e6fac2a4de9176660e5b0fbddc4c826"}
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.781351 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" event={"ID":"3741c48a-18da-4405-980e-954b30ea35a4","Type":"ContainerStarted","Data":"7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b"}
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.782346 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw"
Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.783784 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" podStartSLOduration=3.206363088 podStartE2EDuration="18.783744474s" podCreationTimestamp="2025-12-01 20:20:44 +0000 UTC" firstStartedPulling="2025-12-01 20:20:45.738867649 +0000 UTC m=+965.665949066" lastFinishedPulling="2025-12-01 20:21:01.316249035 +0000 UTC m=+981.243330452" observedRunningTime="2025-12-01 20:21:02.779263613 +0000 UTC m=+982.706345030" watchObservedRunningTime="2025-12-01 20:21:02.783744474 +0000 UTC m=+982.710825891"
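
The pod_startup_latency_tracker entry above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling), i.e. startup latency with pull time excluded. Re-deriving the dnsmasq-dns-5cd665b7c7-6f9ml numbers from the timestamps exactly as logged (monotonic m=+ suffixes dropped):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	parse := func(s string) time.Time {
    		t, err := time.Parse(layout, s)
    		if err != nil {
    			panic(err)
    		}
    		return t
    	}
    	created := parse("2025-12-01 20:20:44 +0000 UTC")
    	firstPull := parse("2025-12-01 20:20:45.738867649 +0000 UTC")
    	lastPull := parse("2025-12-01 20:21:01.316249035 +0000 UTC")
    	observed := parse("2025-12-01 20:21:02.783744474 +0000 UTC")

    	e2e := observed.Sub(created)         // 18.783744474s = podStartE2EDuration
    	slo := e2e - lastPull.Sub(firstPull) // 3.206363088s  = podStartSLOduration
    	fmt.Println(e2e, slo)
    }
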
event={"ID":"3600b37a-414e-4543-b290-2c47ca950f8e","Type":"ContainerDied","Data":"c614142e341ce24f530f894c294f53ed2d08199992121b9c925fbd1b8671dfc1"} Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.786515 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-jbgfb" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.822054 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-mnfbx"] Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.859550 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-mnfbx"] Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.872853 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" podStartSLOduration=2.6518889249999997 podStartE2EDuration="17.8728165s" podCreationTimestamp="2025-12-01 20:20:45 +0000 UTC" firstStartedPulling="2025-12-01 20:20:46.079621133 +0000 UTC m=+966.006702550" lastFinishedPulling="2025-12-01 20:21:01.300548708 +0000 UTC m=+981.227630125" observedRunningTime="2025-12-01 20:21:02.848417318 +0000 UTC m=+982.775498735" watchObservedRunningTime="2025-12-01 20:21:02.8728165 +0000 UTC m=+982.799897927" Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.910938 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-jbgfb"] Dec 01 20:21:02 crc kubenswrapper[4852]: I1201 20:21:02.920209 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-jbgfb"] Dec 01 20:21:03 crc kubenswrapper[4852]: I1201 20:21:03.430057 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 20:21:04 crc kubenswrapper[4852]: I1201 20:21:04.335139 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fc788be-6cf7-477e-8c77-ca11daf3bcc5" path="/var/lib/kubelet/pods/0fc788be-6cf7-477e-8c77-ca11daf3bcc5/volumes" Dec 01 20:21:04 crc kubenswrapper[4852]: I1201 20:21:04.335636 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3600b37a-414e-4543-b290-2c47ca950f8e" path="/var/lib/kubelet/pods/3600b37a-414e-4543-b290-2c47ca950f8e/volumes" Dec 01 20:21:07 crc kubenswrapper[4852]: I1201 20:21:07.832588 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3e569dc0-0de8-47cf-a1d3-1e649efde4af","Type":"ContainerStarted","Data":"5ea4b4b94f601e8db912241ab192755756c11048ae19e99b9e56204cb5e9f25d"} Dec 01 20:21:10 crc kubenswrapper[4852]: I1201 20:21:10.376659 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:21:10 crc kubenswrapper[4852]: I1201 20:21:10.453476 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:21:10 crc kubenswrapper[4852]: I1201 20:21:10.636091 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-6f9ml"] Dec 01 20:21:10 crc kubenswrapper[4852]: I1201 20:21:10.863193 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" podUID="49a9a990-85b6-442f-aaa3-43f38451ee93" containerName="dnsmasq-dns" containerID="cri-o://439ca40be44cb2d94fd186ad6eff20ff2a4ff91a378665c5055a2b040c9715f1" gracePeriod=10 Dec 01 20:21:11 crc kubenswrapper[4852]: I1201 20:21:11.897164 4852 generic.go:334] 
"Generic (PLEG): container finished" podID="49a9a990-85b6-442f-aaa3-43f38451ee93" containerID="439ca40be44cb2d94fd186ad6eff20ff2a4ff91a378665c5055a2b040c9715f1" exitCode=0 Dec 01 20:21:11 crc kubenswrapper[4852]: I1201 20:21:11.897229 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" event={"ID":"49a9a990-85b6-442f-aaa3-43f38451ee93","Type":"ContainerDied","Data":"439ca40be44cb2d94fd186ad6eff20ff2a4ff91a378665c5055a2b040c9715f1"} Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.132314 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.250910 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-dns-svc\") pod \"49a9a990-85b6-442f-aaa3-43f38451ee93\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.251418 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55wxm\" (UniqueName: \"kubernetes.io/projected/49a9a990-85b6-442f-aaa3-43f38451ee93-kube-api-access-55wxm\") pod \"49a9a990-85b6-442f-aaa3-43f38451ee93\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.251511 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-config\") pod \"49a9a990-85b6-442f-aaa3-43f38451ee93\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.257106 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49a9a990-85b6-442f-aaa3-43f38451ee93-kube-api-access-55wxm" (OuterVolumeSpecName: "kube-api-access-55wxm") pod "49a9a990-85b6-442f-aaa3-43f38451ee93" (UID: "49a9a990-85b6-442f-aaa3-43f38451ee93"). InnerVolumeSpecName "kube-api-access-55wxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:13 crc kubenswrapper[4852]: E1201 20:21:13.291327 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-config podName:49a9a990-85b6-442f-aaa3-43f38451ee93 nodeName:}" failed. No retries permitted until 2025-12-01 20:21:13.791287115 +0000 UTC m=+993.718368532 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config" (UniqueName: "kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-config") pod "49a9a990-85b6-442f-aaa3-43f38451ee93" (UID: "49a9a990-85b6-442f-aaa3-43f38451ee93") : error deleting /var/lib/kubelet/pods/49a9a990-85b6-442f-aaa3-43f38451ee93/volume-subpaths: remove /var/lib/kubelet/pods/49a9a990-85b6-442f-aaa3-43f38451ee93/volume-subpaths: no such file or directory Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.291798 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "49a9a990-85b6-442f-aaa3-43f38451ee93" (UID: "49a9a990-85b6-442f-aaa3-43f38451ee93"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.353968 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.354013 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55wxm\" (UniqueName: \"kubernetes.io/projected/49a9a990-85b6-442f-aaa3-43f38451ee93-kube-api-access-55wxm\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.867692 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-config\") pod \"49a9a990-85b6-442f-aaa3-43f38451ee93\" (UID: \"49a9a990-85b6-442f-aaa3-43f38451ee93\") " Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.868921 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-config" (OuterVolumeSpecName: "config") pod "49a9a990-85b6-442f-aaa3-43f38451ee93" (UID: "49a9a990-85b6-442f-aaa3-43f38451ee93"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.918831 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" event={"ID":"49a9a990-85b6-442f-aaa3-43f38451ee93","Type":"ContainerDied","Data":"951d7b623f82db3766e57b63c50f1884334bd39e774aaf5899a7e3b1507ded57"} Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.918910 4852 scope.go:117] "RemoveContainer" containerID="439ca40be44cb2d94fd186ad6eff20ff2a4ff91a378665c5055a2b040c9715f1" Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.919073 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cd665b7c7-6f9ml" Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.971409 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49a9a990-85b6-442f-aaa3-43f38451ee93-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.977300 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-6f9ml"] Dec 01 20:21:13 crc kubenswrapper[4852]: I1201 20:21:13.983694 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-6f9ml"] Dec 01 20:21:14 crc kubenswrapper[4852]: I1201 20:21:14.024074 4852 scope.go:117] "RemoveContainer" containerID="1b8cb127179104119957457a3084ceb076970412542d856805977a819874d907" Dec 01 20:21:14 crc kubenswrapper[4852]: I1201 20:21:14.338734 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49a9a990-85b6-442f-aaa3-43f38451ee93" path="/var/lib/kubelet/pods/49a9a990-85b6-442f-aaa3-43f38451ee93/volumes" Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.949937 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b634d10f-beff-4ef8-8602-8e4acb8a5a4b","Type":"ContainerStarted","Data":"1a3020dd02aaa6a46b248b982f5497dd2bb5178a04e3e26688b42ba73051ade6"} Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.950726 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.953904 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rwcdk" event={"ID":"d0b85f35-5e0a-45ed-a162-02c81ffbdedb","Type":"ContainerStarted","Data":"cf790f99b1afe42dbae71497fc2e9faaaa98035a342cdd76daa0c1a6158db41b"} Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.956390 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"775ec07e-8dd8-47f7-94f1-4c5355335a82","Type":"ContainerStarted","Data":"9e2d601aa1361aec4fcc0214a900980de76abbced0fe3dc3b3d10d95ffed96b0"} Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.958916 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3e569dc0-0de8-47cf-a1d3-1e649efde4af","Type":"ContainerStarted","Data":"5acf1fb3b8f4e99048c1685c233349dc3ce1701aa3e6aaf49adc3804c95d47c4"} Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.960883 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5a512edf-0808-47a9-91dd-81da3cf1cda9","Type":"ContainerStarted","Data":"289798b9e1a63a473fe7d9d38f98162eb8476e3d68b859c6d2ca3506505a678c"} Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.963275 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8kwmk" event={"ID":"5ef29299-3043-4921-b77b-07416d89ed96","Type":"ContainerStarted","Data":"345275c70020686e24247982a31d1dbf16d588d01b8b84f25037176049f3c9a0"} Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.963407 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-8kwmk" Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.966976 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe","Type":"ContainerStarted","Data":"3e43f707b463a60842f4baa4fc55f6c555099c33c89c8365d8da286c1b303b04"} Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.976683 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=12.182848169 podStartE2EDuration="24.976654197s" podCreationTimestamp="2025-12-01 20:20:51 +0000 UTC" firstStartedPulling="2025-12-01 20:21:02.242914782 +0000 UTC m=+982.169996199" lastFinishedPulling="2025-12-01 20:21:15.03672081 +0000 UTC m=+994.963802227" observedRunningTime="2025-12-01 20:21:15.967973372 +0000 UTC m=+995.895054799" watchObservedRunningTime="2025-12-01 20:21:15.976654197 +0000 UTC m=+995.903735614" Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.976870 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"5724232f-c6e6-4356-b4b2-a622191bedaf","Type":"ContainerStarted","Data":"6f72c31da8c175f32708bc4e053e4a500521d607b6f4e21e2b6ba5ed132d7f82"} Dec 01 20:21:15 crc kubenswrapper[4852]: I1201 20:21:15.978427 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 01 20:21:16 crc kubenswrapper[4852]: I1201 20:21:16.056490 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-8kwmk" podStartSLOduration=10.403304851 podStartE2EDuration="22.056443698s" podCreationTimestamp="2025-12-01 20:20:54 +0000 UTC" firstStartedPulling="2025-12-01 20:21:01.909575997 +0000 UTC m=+981.836657414" lastFinishedPulling="2025-12-01 20:21:13.562714844 +0000 UTC m=+993.489796261" observedRunningTime="2025-12-01 20:21:16.054868919 +0000 UTC m=+995.981950346" watchObservedRunningTime="2025-12-01 20:21:16.056443698 +0000 UTC m=+995.983525115" Dec 01 20:21:16 crc kubenswrapper[4852]: I1201 20:21:16.113104 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=15.156418508 podStartE2EDuration="27.113077028s" podCreationTimestamp="2025-12-01 20:20:49 +0000 UTC" firstStartedPulling="2025-12-01 20:21:02.216448855 +0000 UTC m=+982.143530272" lastFinishedPulling="2025-12-01 20:21:14.173107365 +0000 UTC m=+994.100188792" observedRunningTime="2025-12-01 20:21:16.110765145 +0000 UTC m=+996.037846592" watchObservedRunningTime="2025-12-01 20:21:16.113077028 +0000 UTC m=+996.040158445" Dec 01 20:21:16 crc kubenswrapper[4852]: I1201 20:21:16.989798 4852 generic.go:334] "Generic (PLEG): container finished" podID="d0b85f35-5e0a-45ed-a162-02c81ffbdedb" containerID="cf790f99b1afe42dbae71497fc2e9faaaa98035a342cdd76daa0c1a6158db41b" exitCode=0 Dec 01 20:21:16 crc kubenswrapper[4852]: I1201 20:21:16.989904 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rwcdk" event={"ID":"d0b85f35-5e0a-45ed-a162-02c81ffbdedb","Type":"ContainerDied","Data":"cf790f99b1afe42dbae71497fc2e9faaaa98035a342cdd76daa0c1a6158db41b"} Dec 01 20:21:16 crc kubenswrapper[4852]: I1201 20:21:16.992334 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca","Type":"ContainerStarted","Data":"91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5"} Dec 01 20:21:16 crc kubenswrapper[4852]: I1201 20:21:16.994952 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"ff0aa0ab-3c85-4b10-a2c4-a680086db344","Type":"ContainerStarted","Data":"ee7c14abc615f6557fff5cc024b7db4e9b29feed38c8bee4c32bce0f856b83fd"} Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.011374 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rwcdk" event={"ID":"d0b85f35-5e0a-45ed-a162-02c81ffbdedb","Type":"ContainerStarted","Data":"7b6289b423e6c3f92551e80f0faa4cc79bfc1c1dbb5f7dcf963ca859b09b1f40"} Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.012063 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.012083 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rwcdk" event={"ID":"d0b85f35-5e0a-45ed-a162-02c81ffbdedb","Type":"ContainerStarted","Data":"b9b652a5fd8d512ed9402c634dc7a0ad5a061fb6ec48ea43e6b123677c7a319c"} Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.012099 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.037216 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-rwcdk" podStartSLOduration=12.813143514 podStartE2EDuration="24.03719912s" podCreationTimestamp="2025-12-01 20:20:54 +0000 UTC" firstStartedPulling="2025-12-01 20:21:02.413667548 +0000 UTC m=+982.340748955" lastFinishedPulling="2025-12-01 20:21:13.637723144 +0000 UTC m=+993.564804561" observedRunningTime="2025-12-01 20:21:18.035146186 +0000 UTC m=+997.962227603" watchObservedRunningTime="2025-12-01 20:21:18.03719912 +0000 UTC m=+997.964280537" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.196452 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-86jpt"] Dec 01 20:21:18 crc kubenswrapper[4852]: E1201 20:21:18.196881 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49a9a990-85b6-442f-aaa3-43f38451ee93" containerName="init" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.196904 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="49a9a990-85b6-442f-aaa3-43f38451ee93" containerName="init" Dec 01 20:21:18 crc kubenswrapper[4852]: E1201 20:21:18.196936 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49a9a990-85b6-442f-aaa3-43f38451ee93" containerName="dnsmasq-dns" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.196943 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="49a9a990-85b6-442f-aaa3-43f38451ee93" containerName="dnsmasq-dns" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.197111 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="49a9a990-85b6-442f-aaa3-43f38451ee93" containerName="dnsmasq-dns" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.198886 4852 util.go:30] "No sandbox for pod can be found. 
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.198886 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.204904 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.214507 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-86jpt"]
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.262738 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30528cc-b404-4564-bcac-da1fdc60ae52-combined-ca-bundle\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.262791 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/b30528cc-b404-4564-bcac-da1fdc60ae52-ovs-rundir\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.262833 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b30528cc-b404-4564-bcac-da1fdc60ae52-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.263065 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b30528cc-b404-4564-bcac-da1fdc60ae52-config\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.263130 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/b30528cc-b404-4564-bcac-da1fdc60ae52-ovn-rundir\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.263175 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84mnt\" (UniqueName: \"kubernetes.io/projected/b30528cc-b404-4564-bcac-da1fdc60ae52-kube-api-access-84mnt\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.365258 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84mnt\" (UniqueName: \"kubernetes.io/projected/b30528cc-b404-4564-bcac-da1fdc60ae52-kube-api-access-84mnt\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.365703 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30528cc-b404-4564-bcac-da1fdc60ae52-combined-ca-bundle\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.365782 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/b30528cc-b404-4564-bcac-da1fdc60ae52-ovs-rundir\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.365858 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b30528cc-b404-4564-bcac-da1fdc60ae52-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.366021 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b30528cc-b404-4564-bcac-da1fdc60ae52-config\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.366057 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/b30528cc-b404-4564-bcac-da1fdc60ae52-ovn-rundir\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.375942 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/b30528cc-b404-4564-bcac-da1fdc60ae52-ovs-rundir\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.375985 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/b30528cc-b404-4564-bcac-da1fdc60ae52-ovn-rundir\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.376308 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b30528cc-b404-4564-bcac-da1fdc60ae52-config\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.384608 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b30528cc-b404-4564-bcac-da1fdc60ae52-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.391903 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84mnt\" (UniqueName: \"kubernetes.io/projected/b30528cc-b404-4564-bcac-da1fdc60ae52-kube-api-access-84mnt\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.403794 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30528cc-b404-4564-bcac-da1fdc60ae52-combined-ca-bundle\") pod \"ovn-controller-metrics-86jpt\" (UID: \"b30528cc-b404-4564-bcac-da1fdc60ae52\") " pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.462637 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-684dc5d7df-z6zkp"]
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.471054 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.474523 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.484617 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-684dc5d7df-z6zkp"]
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.534622 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-86jpt"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.569530 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-ovsdbserver-nb\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.569645 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-dns-svc\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.569673 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdz5l\" (UniqueName: \"kubernetes.io/projected/3c27feba-7df6-40cb-950c-f54252335955-kube-api-access-pdz5l\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.569731 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-config\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.647214 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-684dc5d7df-z6zkp"]
Dec 01 20:21:18 crc kubenswrapper[4852]: E1201 20:21:18.648306 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-pdz5l ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp" podUID="3c27feba-7df6-40cb-950c-f54252335955"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.670846 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-ovsdbserver-nb\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.670922 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-dns-svc\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.670945 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdz5l\" (UniqueName: \"kubernetes.io/projected/3c27feba-7df6-40cb-950c-f54252335955-kube-api-access-pdz5l\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.670984 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-config\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.672007 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-dns-svc\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.672020 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-config\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.672714 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-ovsdbserver-nb\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.682697 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-5m8sc"]
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.685472 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.688341 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.691808 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdz5l\" (UniqueName: \"kubernetes.io/projected/3c27feba-7df6-40cb-950c-f54252335955-kube-api-access-pdz5l\") pod \"dnsmasq-dns-684dc5d7df-z6zkp\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.716672 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-5m8sc"]
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.772711 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-config\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.772775 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hht2v\" (UniqueName: \"kubernetes.io/projected/fa303a3a-fc0e-46e6-a125-4645d6de74f7-kube-api-access-hht2v\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.772851 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-nb\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.772931 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-dns-svc\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.772963 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-sb\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc"
Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.874162 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-config\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc"
pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.874277 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-nb\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.874336 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-dns-svc\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.874359 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-sb\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.875331 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-sb\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.875931 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-dns-svc\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.876330 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-nb\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.876387 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-config\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:18 crc kubenswrapper[4852]: I1201 20:21:18.897426 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hht2v\" (UniqueName: \"kubernetes.io/projected/fa303a3a-fc0e-46e6-a125-4645d6de74f7-kube-api-access-hht2v\") pod \"dnsmasq-dns-58bd875f97-5m8sc\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.020779 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.032035 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.052242 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.076765 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdz5l\" (UniqueName: \"kubernetes.io/projected/3c27feba-7df6-40cb-950c-f54252335955-kube-api-access-pdz5l\") pod \"3c27feba-7df6-40cb-950c-f54252335955\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.077401 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-dns-svc\") pod \"3c27feba-7df6-40cb-950c-f54252335955\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.077506 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-ovsdbserver-nb\") pod \"3c27feba-7df6-40cb-950c-f54252335955\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.077556 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-config\") pod \"3c27feba-7df6-40cb-950c-f54252335955\" (UID: \"3c27feba-7df6-40cb-950c-f54252335955\") " Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.078889 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3c27feba-7df6-40cb-950c-f54252335955" (UID: "3c27feba-7df6-40cb-950c-f54252335955"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.079027 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-config" (OuterVolumeSpecName: "config") pod "3c27feba-7df6-40cb-950c-f54252335955" (UID: "3c27feba-7df6-40cb-950c-f54252335955"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.079195 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3c27feba-7df6-40cb-950c-f54252335955" (UID: "3c27feba-7df6-40cb-950c-f54252335955"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.081049 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c27feba-7df6-40cb-950c-f54252335955-kube-api-access-pdz5l" (OuterVolumeSpecName: "kube-api-access-pdz5l") pod "3c27feba-7df6-40cb-950c-f54252335955" (UID: "3c27feba-7df6-40cb-950c-f54252335955"). InnerVolumeSpecName "kube-api-access-pdz5l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.179973 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.180368 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.180379 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdz5l\" (UniqueName: \"kubernetes.io/projected/3c27feba-7df6-40cb-950c-f54252335955-kube-api-access-pdz5l\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:19 crc kubenswrapper[4852]: I1201 20:21:19.180389 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c27feba-7df6-40cb-950c-f54252335955-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:20 crc kubenswrapper[4852]: I1201 20:21:20.036143 4852 generic.go:334] "Generic (PLEG): container finished" podID="5a512edf-0808-47a9-91dd-81da3cf1cda9" containerID="289798b9e1a63a473fe7d9d38f98162eb8476e3d68b859c6d2ca3506505a678c" exitCode=0 Dec 01 20:21:20 crc kubenswrapper[4852]: I1201 20:21:20.036667 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5a512edf-0808-47a9-91dd-81da3cf1cda9","Type":"ContainerDied","Data":"289798b9e1a63a473fe7d9d38f98162eb8476e3d68b859c6d2ca3506505a678c"} Dec 01 20:21:20 crc kubenswrapper[4852]: I1201 20:21:20.039008 4852 generic.go:334] "Generic (PLEG): container finished" podID="775ec07e-8dd8-47f7-94f1-4c5355335a82" containerID="9e2d601aa1361aec4fcc0214a900980de76abbced0fe3dc3b3d10d95ffed96b0" exitCode=0 Dec 01 20:21:20 crc kubenswrapper[4852]: I1201 20:21:20.039074 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-684dc5d7df-z6zkp" Dec 01 20:21:20 crc kubenswrapper[4852]: I1201 20:21:20.040268 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"775ec07e-8dd8-47f7-94f1-4c5355335a82","Type":"ContainerDied","Data":"9e2d601aa1361aec4fcc0214a900980de76abbced0fe3dc3b3d10d95ffed96b0"} Dec 01 20:21:20 crc kubenswrapper[4852]: I1201 20:21:20.113592 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-684dc5d7df-z6zkp"] Dec 01 20:21:20 crc kubenswrapper[4852]: I1201 20:21:20.126366 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-684dc5d7df-z6zkp"] Dec 01 20:21:20 crc kubenswrapper[4852]: I1201 20:21:20.158650 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-86jpt"] Dec 01 20:21:20 crc kubenswrapper[4852]: I1201 20:21:20.245094 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-5m8sc"] Dec 01 20:21:20 crc kubenswrapper[4852]: W1201 20:21:20.250135 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa303a3a_fc0e_46e6_a125_4645d6de74f7.slice/crio-6a2db48c96d11afa54eee483260d902c98c28e36b967d8395a002043e5d0c55d WatchSource:0}: Error finding container 6a2db48c96d11afa54eee483260d902c98c28e36b967d8395a002043e5d0c55d: Status 404 returned error can't find the container with id 6a2db48c96d11afa54eee483260d902c98c28e36b967d8395a002043e5d0c55d Dec 01 20:21:20 crc kubenswrapper[4852]: I1201 20:21:20.336878 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c27feba-7df6-40cb-950c-f54252335955" path="/var/lib/kubelet/pods/3c27feba-7df6-40cb-950c-f54252335955/volumes" Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.048331 4852 generic.go:334] "Generic (PLEG): container finished" podID="fa303a3a-fc0e-46e6-a125-4645d6de74f7" containerID="86119d281e16e66ae796ba4cb23a6a848848ad6adea5124eb0e32b4b1afb56c8" exitCode=0 Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.048417 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" event={"ID":"fa303a3a-fc0e-46e6-a125-4645d6de74f7","Type":"ContainerDied","Data":"86119d281e16e66ae796ba4cb23a6a848848ad6adea5124eb0e32b4b1afb56c8"} Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.048722 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" event={"ID":"fa303a3a-fc0e-46e6-a125-4645d6de74f7","Type":"ContainerStarted","Data":"6a2db48c96d11afa54eee483260d902c98c28e36b967d8395a002043e5d0c55d"} Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.051235 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-86jpt" event={"ID":"b30528cc-b404-4564-bcac-da1fdc60ae52","Type":"ContainerStarted","Data":"44b74092ca8f4f69762dd49725bda3bc4c8f6d3de594ec3198acacf6f0841cec"} Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.051293 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-86jpt" event={"ID":"b30528cc-b404-4564-bcac-da1fdc60ae52","Type":"ContainerStarted","Data":"69cbbc137c7c541eaa7a9c294b7f30d6d55e779720899253315d81f52abc5a9c"} Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.055672 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"775ec07e-8dd8-47f7-94f1-4c5355335a82","Type":"ContainerStarted","Data":"d198f01f8d35e5eb50068958da3100b93a8eefaca11b07af718b38873adf1bf6"} Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.059522 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3e569dc0-0de8-47cf-a1d3-1e649efde4af","Type":"ContainerStarted","Data":"72eb72f772f3c1d57e9b2bbdfb2f2fb1cc4cb4ee9ceb82f64a652957631bf923"} Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.060895 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5a512edf-0808-47a9-91dd-81da3cf1cda9","Type":"ContainerStarted","Data":"8f519d3a813e9bc045e1db855d88641d5a88a2bdeaefad02e71570a7249b7b5d"} Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.065212 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0ff6ee0b-6797-494c-8166-88c5cc7cf3fe","Type":"ContainerStarted","Data":"77c69d41bdac16fb4d243108a6d83e8dfd690e776d108d1f113ca6deff34711a"} Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.135644 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=21.245679314 podStartE2EDuration="33.135608965s" podCreationTimestamp="2025-12-01 20:20:48 +0000 UTC" firstStartedPulling="2025-12-01 20:21:01.434583724 +0000 UTC m=+981.361665141" lastFinishedPulling="2025-12-01 20:21:13.324513375 +0000 UTC m=+993.251594792" observedRunningTime="2025-12-01 20:21:21.096480539 +0000 UTC m=+1001.023561996" watchObservedRunningTime="2025-12-01 20:21:21.135608965 +0000 UTC m=+1001.062690422" Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.161294 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-86jpt" podStartSLOduration=3.161268006 podStartE2EDuration="3.161268006s" podCreationTimestamp="2025-12-01 20:21:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:21:21.124660239 +0000 UTC m=+1001.051741666" watchObservedRunningTime="2025-12-01 20:21:21.161268006 +0000 UTC m=+1001.088349423" Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.166573 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.170705 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=15.039009772 podStartE2EDuration="27.170683924s" podCreationTimestamp="2025-12-01 20:20:54 +0000 UTC" firstStartedPulling="2025-12-01 20:21:07.537666082 +0000 UTC m=+987.464747489" lastFinishedPulling="2025-12-01 20:21:19.669340224 +0000 UTC m=+999.596421641" observedRunningTime="2025-12-01 20:21:21.161089161 +0000 UTC m=+1001.088170588" watchObservedRunningTime="2025-12-01 20:21:21.170683924 +0000 UTC m=+1001.097765341" Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.198499 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=6.8797022420000005 podStartE2EDuration="24.198435201s" podCreationTimestamp="2025-12-01 20:20:57 +0000 UTC" firstStartedPulling="2025-12-01 20:21:02.359641301 +0000 UTC m=+982.286722718" lastFinishedPulling="2025-12-01 20:21:19.67837422 +0000 UTC m=+999.605455677" observedRunningTime="2025-12-01 20:21:21.187346621 +0000 UTC 
m=+1001.114428048" watchObservedRunningTime="2025-12-01 20:21:21.198435201 +0000 UTC m=+1001.125516618" Dec 01 20:21:21 crc kubenswrapper[4852]: I1201 20:21:21.216959 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=22.758091734 podStartE2EDuration="35.216942136s" podCreationTimestamp="2025-12-01 20:20:46 +0000 UTC" firstStartedPulling="2025-12-01 20:21:01.566960588 +0000 UTC m=+981.494042005" lastFinishedPulling="2025-12-01 20:21:14.02581099 +0000 UTC m=+993.952892407" observedRunningTime="2025-12-01 20:21:21.214501949 +0000 UTC m=+1001.141583376" watchObservedRunningTime="2025-12-01 20:21:21.216942136 +0000 UTC m=+1001.144023553" Dec 01 20:21:22 crc kubenswrapper[4852]: I1201 20:21:22.051702 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 01 20:21:22 crc kubenswrapper[4852]: I1201 20:21:22.075128 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" event={"ID":"fa303a3a-fc0e-46e6-a125-4645d6de74f7","Type":"ContainerStarted","Data":"a3fe81fac77606227c96de8aa45f638ae053ebb10b52870b288c4f493f10b584"} Dec 01 20:21:22 crc kubenswrapper[4852]: I1201 20:21:22.108499 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" podStartSLOduration=4.108478453 podStartE2EDuration="4.108478453s" podCreationTimestamp="2025-12-01 20:21:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:21:22.10395042 +0000 UTC m=+1002.031031847" watchObservedRunningTime="2025-12-01 20:21:22.108478453 +0000 UTC m=+1002.035559880" Dec 01 20:21:23 crc kubenswrapper[4852]: I1201 20:21:23.084065 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:23 crc kubenswrapper[4852]: I1201 20:21:23.166023 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 01 20:21:23 crc kubenswrapper[4852]: I1201 20:21:23.166084 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 01 20:21:23 crc kubenswrapper[4852]: I1201 20:21:23.213158 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 01 20:21:23 crc kubenswrapper[4852]: I1201 20:21:23.213299 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.092561 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.137122 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.140736 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.395288 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.396964 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.400927 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-gxp8z" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.401246 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.401522 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.412037 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.427251 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.482787 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7170ebb9-5806-4a03-8316-8c396a916197-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.482915 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn28h\" (UniqueName: \"kubernetes.io/projected/7170ebb9-5806-4a03-8316-8c396a916197-kube-api-access-gn28h\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.482988 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7170ebb9-5806-4a03-8316-8c396a916197-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.483033 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7170ebb9-5806-4a03-8316-8c396a916197-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.483069 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7170ebb9-5806-4a03-8316-8c396a916197-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.483114 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7170ebb9-5806-4a03-8316-8c396a916197-config\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.483179 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7170ebb9-5806-4a03-8316-8c396a916197-scripts\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: 
I1201 20:21:24.585429 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7170ebb9-5806-4a03-8316-8c396a916197-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.585523 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn28h\" (UniqueName: \"kubernetes.io/projected/7170ebb9-5806-4a03-8316-8c396a916197-kube-api-access-gn28h\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.585555 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7170ebb9-5806-4a03-8316-8c396a916197-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.585589 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7170ebb9-5806-4a03-8316-8c396a916197-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.585610 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7170ebb9-5806-4a03-8316-8c396a916197-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.585647 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7170ebb9-5806-4a03-8316-8c396a916197-config\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.585691 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7170ebb9-5806-4a03-8316-8c396a916197-scripts\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.586816 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7170ebb9-5806-4a03-8316-8c396a916197-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.588199 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7170ebb9-5806-4a03-8316-8c396a916197-scripts\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.588908 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7170ebb9-5806-4a03-8316-8c396a916197-config\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.593654 4852 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7170ebb9-5806-4a03-8316-8c396a916197-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.594210 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7170ebb9-5806-4a03-8316-8c396a916197-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.595495 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7170ebb9-5806-4a03-8316-8c396a916197-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.617586 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn28h\" (UniqueName: \"kubernetes.io/projected/7170ebb9-5806-4a03-8316-8c396a916197-kube-api-access-gn28h\") pod \"ovn-northd-0\" (UID: \"7170ebb9-5806-4a03-8316-8c396a916197\") " pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.719700 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 01 20:21:24 crc kubenswrapper[4852]: I1201 20:21:24.999658 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 01 20:21:25 crc kubenswrapper[4852]: I1201 20:21:25.295437 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 20:21:25 crc kubenswrapper[4852]: W1201 20:21:25.304002 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7170ebb9_5806_4a03_8316_8c396a916197.slice/crio-310f0dccd138623890db9cb17a2ba9fc037b964f6efb95d1a64c6b076371cd4f WatchSource:0}: Error finding container 310f0dccd138623890db9cb17a2ba9fc037b964f6efb95d1a64c6b076371cd4f: Status 404 returned error can't find the container with id 310f0dccd138623890db9cb17a2ba9fc037b964f6efb95d1a64c6b076371cd4f Dec 01 20:21:26 crc kubenswrapper[4852]: I1201 20:21:26.115748 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7170ebb9-5806-4a03-8316-8c396a916197","Type":"ContainerStarted","Data":"310f0dccd138623890db9cb17a2ba9fc037b964f6efb95d1a64c6b076371cd4f"} Dec 01 20:21:28 crc kubenswrapper[4852]: I1201 20:21:28.143604 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7170ebb9-5806-4a03-8316-8c396a916197","Type":"ContainerStarted","Data":"5d48f4ed8bac8e93b20a9eb1d2f75ebedee433be526713ccd470b4d80d6cddbf"} Dec 01 20:21:28 crc kubenswrapper[4852]: I1201 20:21:28.144504 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 01 20:21:28 crc kubenswrapper[4852]: I1201 20:21:28.144531 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7170ebb9-5806-4a03-8316-8c396a916197","Type":"ContainerStarted","Data":"4770bbdb2cbdc6386499d85400dd23e273b0e21d1a549f62105f1671078f60a9"} Dec 01 20:21:28 crc kubenswrapper[4852]: I1201 20:21:28.167852 4852 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.790504943 podStartE2EDuration="4.167817638s" podCreationTimestamp="2025-12-01 20:21:24 +0000 UTC" firstStartedPulling="2025-12-01 20:21:25.307778797 +0000 UTC m=+1005.234860214" lastFinishedPulling="2025-12-01 20:21:27.685091502 +0000 UTC m=+1007.612172909" observedRunningTime="2025-12-01 20:21:28.165166154 +0000 UTC m=+1008.092247601" watchObservedRunningTime="2025-12-01 20:21:28.167817638 +0000 UTC m=+1008.094899055" Dec 01 20:21:28 crc kubenswrapper[4852]: I1201 20:21:28.480910 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 01 20:21:28 crc kubenswrapper[4852]: I1201 20:21:28.481661 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 01 20:21:28 crc kubenswrapper[4852]: I1201 20:21:28.584634 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.054764 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.126226 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-8l6rw"] Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.127025 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" podUID="3741c48a-18da-4405-980e-954b30ea35a4" containerName="dnsmasq-dns" containerID="cri-o://7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b" gracePeriod=10 Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.275153 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.677750 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.689863 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.689913 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.744539 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-7wvf6"] Dec 01 20:21:29 crc kubenswrapper[4852]: E1201 20:21:29.745190 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3741c48a-18da-4405-980e-954b30ea35a4" containerName="init" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.745212 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="3741c48a-18da-4405-980e-954b30ea35a4" containerName="init" Dec 01 20:21:29 crc kubenswrapper[4852]: E1201 20:21:29.745248 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3741c48a-18da-4405-980e-954b30ea35a4" containerName="dnsmasq-dns" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.745259 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="3741c48a-18da-4405-980e-954b30ea35a4" containerName="dnsmasq-dns" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.745424 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="3741c48a-18da-4405-980e-954b30ea35a4" containerName="dnsmasq-dns" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.746331 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7wvf6" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.756948 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-f1cb-account-create-update-6rb5m"] Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.758199 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f1cb-account-create-update-6rb5m" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.761201 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.768553 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-7wvf6"] Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.799772 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-dns-svc\") pod \"3741c48a-18da-4405-980e-954b30ea35a4\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.799898 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7kvk\" (UniqueName: \"kubernetes.io/projected/3741c48a-18da-4405-980e-954b30ea35a4-kube-api-access-q7kvk\") pod \"3741c48a-18da-4405-980e-954b30ea35a4\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.800181 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-config\") pod \"3741c48a-18da-4405-980e-954b30ea35a4\" (UID: \"3741c48a-18da-4405-980e-954b30ea35a4\") " Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.809895 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f1cb-account-create-update-6rb5m"] Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.815637 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3741c48a-18da-4405-980e-954b30ea35a4-kube-api-access-q7kvk" (OuterVolumeSpecName: "kube-api-access-q7kvk") pod "3741c48a-18da-4405-980e-954b30ea35a4" (UID: "3741c48a-18da-4405-980e-954b30ea35a4"). InnerVolumeSpecName "kube-api-access-q7kvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.866770 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.869848 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-config" (OuterVolumeSpecName: "config") pod "3741c48a-18da-4405-980e-954b30ea35a4" (UID: "3741c48a-18da-4405-980e-954b30ea35a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.877114 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3741c48a-18da-4405-980e-954b30ea35a4" (UID: "3741c48a-18da-4405-980e-954b30ea35a4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.908357 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6de166a4-30e1-41db-81c9-024c530a1da3-operator-scripts\") pod \"keystone-db-create-7wvf6\" (UID: \"6de166a4-30e1-41db-81c9-024c530a1da3\") " pod="openstack/keystone-db-create-7wvf6" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.908439 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv62c\" (UniqueName: \"kubernetes.io/projected/6de166a4-30e1-41db-81c9-024c530a1da3-kube-api-access-hv62c\") pod \"keystone-db-create-7wvf6\" (UID: \"6de166a4-30e1-41db-81c9-024c530a1da3\") " pod="openstack/keystone-db-create-7wvf6" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.908480 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d51ca87-b472-4aba-b566-af66d1c18028-operator-scripts\") pod \"keystone-f1cb-account-create-update-6rb5m\" (UID: \"0d51ca87-b472-4aba-b566-af66d1c18028\") " pod="openstack/keystone-f1cb-account-create-update-6rb5m" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.908747 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vmdv\" (UniqueName: \"kubernetes.io/projected/0d51ca87-b472-4aba-b566-af66d1c18028-kube-api-access-4vmdv\") pod \"keystone-f1cb-account-create-update-6rb5m\" (UID: \"0d51ca87-b472-4aba-b566-af66d1c18028\") " pod="openstack/keystone-f1cb-account-create-update-6rb5m" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.909264 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.909280 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7kvk\" (UniqueName: \"kubernetes.io/projected/3741c48a-18da-4405-980e-954b30ea35a4-kube-api-access-q7kvk\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.909291 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3741c48a-18da-4405-980e-954b30ea35a4-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.930544 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-tj7jd"] Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.931984 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tj7jd" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.946585 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-1ce5-account-create-update-4nwzp"] Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.948000 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-1ce5-account-create-update-4nwzp" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.950958 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.953439 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tj7jd"] Dec 01 20:21:29 crc kubenswrapper[4852]: I1201 20:21:29.965442 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1ce5-account-create-update-4nwzp"] Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.010495 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vmdv\" (UniqueName: \"kubernetes.io/projected/0d51ca87-b472-4aba-b566-af66d1c18028-kube-api-access-4vmdv\") pod \"keystone-f1cb-account-create-update-6rb5m\" (UID: \"0d51ca87-b472-4aba-b566-af66d1c18028\") " pod="openstack/keystone-f1cb-account-create-update-6rb5m" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.010574 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04e3b15e-4dde-441e-b414-07ab83f3e23b-operator-scripts\") pod \"placement-db-create-tj7jd\" (UID: \"04e3b15e-4dde-441e-b414-07ab83f3e23b\") " pod="openstack/placement-db-create-tj7jd" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.010630 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2vbg\" (UniqueName: \"kubernetes.io/projected/04e3b15e-4dde-441e-b414-07ab83f3e23b-kube-api-access-j2vbg\") pod \"placement-db-create-tj7jd\" (UID: \"04e3b15e-4dde-441e-b414-07ab83f3e23b\") " pod="openstack/placement-db-create-tj7jd" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.010673 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6de166a4-30e1-41db-81c9-024c530a1da3-operator-scripts\") pod \"keystone-db-create-7wvf6\" (UID: \"6de166a4-30e1-41db-81c9-024c530a1da3\") " pod="openstack/keystone-db-create-7wvf6" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.010704 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e8835f4-45e5-441c-8039-29193087baca-operator-scripts\") pod \"placement-1ce5-account-create-update-4nwzp\" (UID: \"3e8835f4-45e5-441c-8039-29193087baca\") " pod="openstack/placement-1ce5-account-create-update-4nwzp" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.010728 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mw2rn\" (UniqueName: \"kubernetes.io/projected/3e8835f4-45e5-441c-8039-29193087baca-kube-api-access-mw2rn\") pod \"placement-1ce5-account-create-update-4nwzp\" (UID: \"3e8835f4-45e5-441c-8039-29193087baca\") " pod="openstack/placement-1ce5-account-create-update-4nwzp" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.010746 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv62c\" (UniqueName: \"kubernetes.io/projected/6de166a4-30e1-41db-81c9-024c530a1da3-kube-api-access-hv62c\") pod \"keystone-db-create-7wvf6\" (UID: \"6de166a4-30e1-41db-81c9-024c530a1da3\") " pod="openstack/keystone-db-create-7wvf6" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 
20:21:30.010766 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d51ca87-b472-4aba-b566-af66d1c18028-operator-scripts\") pod \"keystone-f1cb-account-create-update-6rb5m\" (UID: \"0d51ca87-b472-4aba-b566-af66d1c18028\") " pod="openstack/keystone-f1cb-account-create-update-6rb5m" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.011605 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d51ca87-b472-4aba-b566-af66d1c18028-operator-scripts\") pod \"keystone-f1cb-account-create-update-6rb5m\" (UID: \"0d51ca87-b472-4aba-b566-af66d1c18028\") " pod="openstack/keystone-f1cb-account-create-update-6rb5m" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.012006 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6de166a4-30e1-41db-81c9-024c530a1da3-operator-scripts\") pod \"keystone-db-create-7wvf6\" (UID: \"6de166a4-30e1-41db-81c9-024c530a1da3\") " pod="openstack/keystone-db-create-7wvf6" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.028853 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vmdv\" (UniqueName: \"kubernetes.io/projected/0d51ca87-b472-4aba-b566-af66d1c18028-kube-api-access-4vmdv\") pod \"keystone-f1cb-account-create-update-6rb5m\" (UID: \"0d51ca87-b472-4aba-b566-af66d1c18028\") " pod="openstack/keystone-f1cb-account-create-update-6rb5m" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.029220 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv62c\" (UniqueName: \"kubernetes.io/projected/6de166a4-30e1-41db-81c9-024c530a1da3-kube-api-access-hv62c\") pod \"keystone-db-create-7wvf6\" (UID: \"6de166a4-30e1-41db-81c9-024c530a1da3\") " pod="openstack/keystone-db-create-7wvf6" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.072051 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7wvf6" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.114302 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f1cb-account-create-update-6rb5m" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.114623 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2vbg\" (UniqueName: \"kubernetes.io/projected/04e3b15e-4dde-441e-b414-07ab83f3e23b-kube-api-access-j2vbg\") pod \"placement-db-create-tj7jd\" (UID: \"04e3b15e-4dde-441e-b414-07ab83f3e23b\") " pod="openstack/placement-db-create-tj7jd" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.114745 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e8835f4-45e5-441c-8039-29193087baca-operator-scripts\") pod \"placement-1ce5-account-create-update-4nwzp\" (UID: \"3e8835f4-45e5-441c-8039-29193087baca\") " pod="openstack/placement-1ce5-account-create-update-4nwzp" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.114781 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mw2rn\" (UniqueName: \"kubernetes.io/projected/3e8835f4-45e5-441c-8039-29193087baca-kube-api-access-mw2rn\") pod \"placement-1ce5-account-create-update-4nwzp\" (UID: \"3e8835f4-45e5-441c-8039-29193087baca\") " pod="openstack/placement-1ce5-account-create-update-4nwzp" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.114864 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04e3b15e-4dde-441e-b414-07ab83f3e23b-operator-scripts\") pod \"placement-db-create-tj7jd\" (UID: \"04e3b15e-4dde-441e-b414-07ab83f3e23b\") " pod="openstack/placement-db-create-tj7jd" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.115813 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04e3b15e-4dde-441e-b414-07ab83f3e23b-operator-scripts\") pod \"placement-db-create-tj7jd\" (UID: \"04e3b15e-4dde-441e-b414-07ab83f3e23b\") " pod="openstack/placement-db-create-tj7jd" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.115981 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e8835f4-45e5-441c-8039-29193087baca-operator-scripts\") pod \"placement-1ce5-account-create-update-4nwzp\" (UID: \"3e8835f4-45e5-441c-8039-29193087baca\") " pod="openstack/placement-1ce5-account-create-update-4nwzp" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.143109 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2vbg\" (UniqueName: \"kubernetes.io/projected/04e3b15e-4dde-441e-b414-07ab83f3e23b-kube-api-access-j2vbg\") pod \"placement-db-create-tj7jd\" (UID: \"04e3b15e-4dde-441e-b414-07ab83f3e23b\") " pod="openstack/placement-db-create-tj7jd" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.147708 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mw2rn\" (UniqueName: \"kubernetes.io/projected/3e8835f4-45e5-441c-8039-29193087baca-kube-api-access-mw2rn\") pod \"placement-1ce5-account-create-update-4nwzp\" (UID: \"3e8835f4-45e5-441c-8039-29193087baca\") " pod="openstack/placement-1ce5-account-create-update-4nwzp" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.178181 4852 generic.go:334] "Generic (PLEG): container finished" podID="3741c48a-18da-4405-980e-954b30ea35a4" 
containerID="7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b" exitCode=0 Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.178253 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" event={"ID":"3741c48a-18da-4405-980e-954b30ea35a4","Type":"ContainerDied","Data":"7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b"} Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.178308 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" event={"ID":"3741c48a-18da-4405-980e-954b30ea35a4","Type":"ContainerDied","Data":"f38164978fd7fb3249a52029c3adcb8c03267c019e8d9422400d6754bae5842e"} Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.178331 4852 scope.go:117] "RemoveContainer" containerID="7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.178344 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-8l6rw" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.227513 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-8l6rw"] Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.233665 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-8l6rw"] Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.239506 4852 scope.go:117] "RemoveContainer" containerID="b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.250785 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tj7jd" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.267033 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-1ce5-account-create-update-4nwzp" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.308518 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.319567 4852 scope.go:117] "RemoveContainer" containerID="7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b" Dec 01 20:21:30 crc kubenswrapper[4852]: E1201 20:21:30.323220 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b\": container with ID starting with 7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b not found: ID does not exist" containerID="7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.323274 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b"} err="failed to get container status \"7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b\": rpc error: code = NotFound desc = could not find container \"7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b\": container with ID starting with 7b271a4522e2fdcc57bb0c411c8e4176a7f719672b47d09e24c1b0f13f5c468b not found: ID does not exist" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.323305 4852 scope.go:117] "RemoveContainer" containerID="b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a" Dec 01 20:21:30 crc kubenswrapper[4852]: E1201 20:21:30.323788 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a\": container with ID starting with b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a not found: ID does not exist" containerID="b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.323852 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a"} err="failed to get container status \"b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a\": rpc error: code = NotFound desc = could not find container \"b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a\": container with ID starting with b3e1fd4ab434ef7e77174d16d184063b8f2c7ffd323596787bf2249787292d4a not found: ID does not exist" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.355631 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3741c48a-18da-4405-980e-954b30ea35a4" path="/var/lib/kubelet/pods/3741c48a-18da-4405-980e-954b30ea35a4/volumes" Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.681256 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-7wvf6"] Dec 01 20:21:30 crc kubenswrapper[4852]: W1201 20:21:30.809684 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d51ca87_b472_4aba_b566_af66d1c18028.slice/crio-dbd47c9c29d2226af55e7166d93660937bbddafa46a9552bb510b06e60d3a86f WatchSource:0}: Error finding container 
dbd47c9c29d2226af55e7166d93660937bbddafa46a9552bb510b06e60d3a86f: Status 404 returned error can't find the container with id dbd47c9c29d2226af55e7166d93660937bbddafa46a9552bb510b06e60d3a86f Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.817902 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tj7jd"] Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.834266 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f1cb-account-create-update-6rb5m"] Dec 01 20:21:30 crc kubenswrapper[4852]: W1201 20:21:30.840571 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e8835f4_45e5_441c_8039_29193087baca.slice/crio-079a07d55500fb9eb330aa32f316ed094e13fcbfc8a01fbfb45b5697c6a654af WatchSource:0}: Error finding container 079a07d55500fb9eb330aa32f316ed094e13fcbfc8a01fbfb45b5697c6a654af: Status 404 returned error can't find the container with id 079a07d55500fb9eb330aa32f316ed094e13fcbfc8a01fbfb45b5697c6a654af Dec 01 20:21:30 crc kubenswrapper[4852]: I1201 20:21:30.845669 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1ce5-account-create-update-4nwzp"] Dec 01 20:21:31 crc kubenswrapper[4852]: I1201 20:21:31.192282 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tj7jd" event={"ID":"04e3b15e-4dde-441e-b414-07ab83f3e23b","Type":"ContainerStarted","Data":"18345b29627ed1ed632b85deeb748f8a3aed003daae23d38d763667bc197ad97"} Dec 01 20:21:31 crc kubenswrapper[4852]: I1201 20:21:31.194140 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7wvf6" event={"ID":"6de166a4-30e1-41db-81c9-024c530a1da3","Type":"ContainerStarted","Data":"1a230c2860f445133eab61a21896bf032bce3be75d0595d5fd21bb6c214eee85"} Dec 01 20:21:31 crc kubenswrapper[4852]: I1201 20:21:31.195874 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f1cb-account-create-update-6rb5m" event={"ID":"0d51ca87-b472-4aba-b566-af66d1c18028","Type":"ContainerStarted","Data":"dbd47c9c29d2226af55e7166d93660937bbddafa46a9552bb510b06e60d3a86f"} Dec 01 20:21:31 crc kubenswrapper[4852]: I1201 20:21:31.198616 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1ce5-account-create-update-4nwzp" event={"ID":"3e8835f4-45e5-441c-8039-29193087baca","Type":"ContainerStarted","Data":"079a07d55500fb9eb330aa32f316ed094e13fcbfc8a01fbfb45b5697c6a654af"} Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.122611 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-kfqqq"] Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.124886 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.149103 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-kfqqq"] Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.261020 4852 generic.go:334] "Generic (PLEG): container finished" podID="0d51ca87-b472-4aba-b566-af66d1c18028" containerID="7b4c887a6a74ad33ca4fd69edcdc0d3f0d9358019f7657def2578ad9eb469981" exitCode=0 Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.263209 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f1cb-account-create-update-6rb5m" event={"ID":"0d51ca87-b472-4aba-b566-af66d1c18028","Type":"ContainerDied","Data":"7b4c887a6a74ad33ca4fd69edcdc0d3f0d9358019f7657def2578ad9eb469981"} Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.268466 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-dns-svc\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.268524 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-nb\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.268570 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxmhv\" (UniqueName: \"kubernetes.io/projected/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-kube-api-access-fxmhv\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.268620 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-sb\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.268649 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-config\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.297823 4852 generic.go:334] "Generic (PLEG): container finished" podID="3e8835f4-45e5-441c-8039-29193087baca" containerID="104e5ea29caee153b5ee3ffe1d28569b952bf663a2c9b8e7ef6130abab92a43f" exitCode=0 Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.297923 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1ce5-account-create-update-4nwzp" event={"ID":"3e8835f4-45e5-441c-8039-29193087baca","Type":"ContainerDied","Data":"104e5ea29caee153b5ee3ffe1d28569b952bf663a2c9b8e7ef6130abab92a43f"} Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.301479 4852 generic.go:334] "Generic (PLEG): container finished" 
podID="04e3b15e-4dde-441e-b414-07ab83f3e23b" containerID="0431e02fb55423d14e8915bef5785b3d924525d06e5747a6ab37d5f05bd33a6f" exitCode=0 Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.301533 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tj7jd" event={"ID":"04e3b15e-4dde-441e-b414-07ab83f3e23b","Type":"ContainerDied","Data":"0431e02fb55423d14e8915bef5785b3d924525d06e5747a6ab37d5f05bd33a6f"} Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.302862 4852 generic.go:334] "Generic (PLEG): container finished" podID="6de166a4-30e1-41db-81c9-024c530a1da3" containerID="4ef873b4b7747ccc26f786bc85441d4a38cdf09f00ec5e6b5179dd3cb3da5347" exitCode=0 Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.302889 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7wvf6" event={"ID":"6de166a4-30e1-41db-81c9-024c530a1da3","Type":"ContainerDied","Data":"4ef873b4b7747ccc26f786bc85441d4a38cdf09f00ec5e6b5179dd3cb3da5347"} Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.370307 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-dns-svc\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.370367 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-nb\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.370393 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxmhv\" (UniqueName: \"kubernetes.io/projected/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-kube-api-access-fxmhv\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.370447 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-sb\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.370490 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-config\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.372799 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-sb\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.373091 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-config\") pod 
\"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.373314 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-nb\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.373402 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-dns-svc\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.392012 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxmhv\" (UniqueName: \"kubernetes.io/projected/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-kube-api-access-fxmhv\") pod \"dnsmasq-dns-7c8cb8df65-kfqqq\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:32 crc kubenswrapper[4852]: I1201 20:21:32.506179 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.011473 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-kfqqq"] Dec 01 20:21:33 crc kubenswrapper[4852]: W1201 20:21:33.025027 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e2d3b9c_bafa_4691_ac33_7852955ea9ad.slice/crio-1f66cb33700dc2765476ba6515ab9cf3befdaa195b1363e18f32f25df5d5a567 WatchSource:0}: Error finding container 1f66cb33700dc2765476ba6515ab9cf3befdaa195b1363e18f32f25df5d5a567: Status 404 returned error can't find the container with id 1f66cb33700dc2765476ba6515ab9cf3befdaa195b1363e18f32f25df5d5a567 Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.275714 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.283118 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.285883 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.286276 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.286418 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-pkn42" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.286496 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.300310 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.326081 4852 generic.go:334] "Generic (PLEG): container finished" podID="2e2d3b9c-bafa-4691-ac33-7852955ea9ad" containerID="9bb626e26446a3c4d9b03971591a8117a63d91f5bdbc5c7b857e3ef5f1a204dd" exitCode=0 Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.326859 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" event={"ID":"2e2d3b9c-bafa-4691-ac33-7852955ea9ad","Type":"ContainerDied","Data":"9bb626e26446a3c4d9b03971591a8117a63d91f5bdbc5c7b857e3ef5f1a204dd"} Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.326901 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" event={"ID":"2e2d3b9c-bafa-4691-ac33-7852955ea9ad","Type":"ContainerStarted","Data":"1f66cb33700dc2765476ba6515ab9cf3befdaa195b1363e18f32f25df5d5a567"} Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.389821 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.390183 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.390246 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtdbh\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-kube-api-access-qtdbh\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.390503 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/0eec4983-f32d-4858-a382-eacc49d726fd-lock\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.390532 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0eec4983-f32d-4858-a382-eacc49d726fd-cache\") pod \"swift-storage-0\" (UID: 
\"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.492424 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/0eec4983-f32d-4858-a382-eacc49d726fd-lock\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.492850 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0eec4983-f32d-4858-a382-eacc49d726fd-cache\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.493175 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/0eec4983-f32d-4858-a382-eacc49d726fd-lock\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.493232 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0eec4983-f32d-4858-a382-eacc49d726fd-cache\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.493307 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.493573 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.493598 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtdbh\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-kube-api-access-qtdbh\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.493715 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: E1201 20:21:33.493990 4852 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 20:21:33 crc kubenswrapper[4852]: E1201 20:21:33.494007 4852 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 20:21:33 crc kubenswrapper[4852]: E1201 20:21:33.494054 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift podName:0eec4983-f32d-4858-a382-eacc49d726fd nodeName:}" failed. 
No retries permitted until 2025-12-01 20:21:33.994035483 +0000 UTC m=+1013.921116900 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift") pod "swift-storage-0" (UID: "0eec4983-f32d-4858-a382-eacc49d726fd") : configmap "swift-ring-files" not found Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.513362 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtdbh\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-kube-api-access-qtdbh\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.522501 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.818389 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-v8pzw"] Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.819705 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.820601 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tj7jd" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.823955 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.824335 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.824770 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7wvf6" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.828832 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.833722 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1ce5-account-create-update-4nwzp" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.845897 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f1cb-account-create-update-6rb5m" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.847962 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-v8pzw"] Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900055 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2vbg\" (UniqueName: \"kubernetes.io/projected/04e3b15e-4dde-441e-b414-07ab83f3e23b-kube-api-access-j2vbg\") pod \"04e3b15e-4dde-441e-b414-07ab83f3e23b\" (UID: \"04e3b15e-4dde-441e-b414-07ab83f3e23b\") " Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900182 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d51ca87-b472-4aba-b566-af66d1c18028-operator-scripts\") pod \"0d51ca87-b472-4aba-b566-af66d1c18028\" (UID: \"0d51ca87-b472-4aba-b566-af66d1c18028\") " Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900283 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04e3b15e-4dde-441e-b414-07ab83f3e23b-operator-scripts\") pod \"04e3b15e-4dde-441e-b414-07ab83f3e23b\" (UID: \"04e3b15e-4dde-441e-b414-07ab83f3e23b\") " Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900357 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hv62c\" (UniqueName: \"kubernetes.io/projected/6de166a4-30e1-41db-81c9-024c530a1da3-kube-api-access-hv62c\") pod \"6de166a4-30e1-41db-81c9-024c530a1da3\" (UID: \"6de166a4-30e1-41db-81c9-024c530a1da3\") " Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900386 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mw2rn\" (UniqueName: \"kubernetes.io/projected/3e8835f4-45e5-441c-8039-29193087baca-kube-api-access-mw2rn\") pod \"3e8835f4-45e5-441c-8039-29193087baca\" (UID: \"3e8835f4-45e5-441c-8039-29193087baca\") " Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900419 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e8835f4-45e5-441c-8039-29193087baca-operator-scripts\") pod \"3e8835f4-45e5-441c-8039-29193087baca\" (UID: \"3e8835f4-45e5-441c-8039-29193087baca\") " Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900479 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vmdv\" (UniqueName: \"kubernetes.io/projected/0d51ca87-b472-4aba-b566-af66d1c18028-kube-api-access-4vmdv\") pod \"0d51ca87-b472-4aba-b566-af66d1c18028\" (UID: \"0d51ca87-b472-4aba-b566-af66d1c18028\") " Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900565 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6de166a4-30e1-41db-81c9-024c530a1da3-operator-scripts\") pod \"6de166a4-30e1-41db-81c9-024c530a1da3\" (UID: \"6de166a4-30e1-41db-81c9-024c530a1da3\") " Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900845 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-scripts\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:33 
crc kubenswrapper[4852]: I1201 20:21:33.900881 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-combined-ca-bundle\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900908 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-swiftconf\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900971 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-ring-data-devices\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.900998 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d31bdc12-ed48-45e2-b990-2b098be82119-etc-swift\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.901115 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch96x\" (UniqueName: \"kubernetes.io/projected/d31bdc12-ed48-45e2-b990-2b098be82119-kube-api-access-ch96x\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.901147 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-dispersionconf\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.902041 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6de166a4-30e1-41db-81c9-024c530a1da3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6de166a4-30e1-41db-81c9-024c530a1da3" (UID: "6de166a4-30e1-41db-81c9-024c530a1da3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.902374 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04e3b15e-4dde-441e-b414-07ab83f3e23b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "04e3b15e-4dde-441e-b414-07ab83f3e23b" (UID: "04e3b15e-4dde-441e-b414-07ab83f3e23b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.902605 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e8835f4-45e5-441c-8039-29193087baca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3e8835f4-45e5-441c-8039-29193087baca" (UID: "3e8835f4-45e5-441c-8039-29193087baca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.903023 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d51ca87-b472-4aba-b566-af66d1c18028-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0d51ca87-b472-4aba-b566-af66d1c18028" (UID: "0d51ca87-b472-4aba-b566-af66d1c18028"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.910242 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e8835f4-45e5-441c-8039-29193087baca-kube-api-access-mw2rn" (OuterVolumeSpecName: "kube-api-access-mw2rn") pod "3e8835f4-45e5-441c-8039-29193087baca" (UID: "3e8835f4-45e5-441c-8039-29193087baca"). InnerVolumeSpecName "kube-api-access-mw2rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.910559 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6de166a4-30e1-41db-81c9-024c530a1da3-kube-api-access-hv62c" (OuterVolumeSpecName: "kube-api-access-hv62c") pod "6de166a4-30e1-41db-81c9-024c530a1da3" (UID: "6de166a4-30e1-41db-81c9-024c530a1da3"). InnerVolumeSpecName "kube-api-access-hv62c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.912072 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04e3b15e-4dde-441e-b414-07ab83f3e23b-kube-api-access-j2vbg" (OuterVolumeSpecName: "kube-api-access-j2vbg") pod "04e3b15e-4dde-441e-b414-07ab83f3e23b" (UID: "04e3b15e-4dde-441e-b414-07ab83f3e23b"). InnerVolumeSpecName "kube-api-access-j2vbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:33 crc kubenswrapper[4852]: I1201 20:21:33.919855 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d51ca87-b472-4aba-b566-af66d1c18028-kube-api-access-4vmdv" (OuterVolumeSpecName: "kube-api-access-4vmdv") pod "0d51ca87-b472-4aba-b566-af66d1c18028" (UID: "0d51ca87-b472-4aba-b566-af66d1c18028"). InnerVolumeSpecName "kube-api-access-4vmdv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.013522 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.013742 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch96x\" (UniqueName: \"kubernetes.io/projected/d31bdc12-ed48-45e2-b990-2b098be82119-kube-api-access-ch96x\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.013817 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-dispersionconf\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.013919 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-scripts\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.013971 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-combined-ca-bundle\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014015 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-swiftconf\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014094 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-ring-data-devices\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014122 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d31bdc12-ed48-45e2-b990-2b098be82119-etc-swift\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014220 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vmdv\" (UniqueName: \"kubernetes.io/projected/0d51ca87-b472-4aba-b566-af66d1c18028-kube-api-access-4vmdv\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014250 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/6de166a4-30e1-41db-81c9-024c530a1da3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014263 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2vbg\" (UniqueName: \"kubernetes.io/projected/04e3b15e-4dde-441e-b414-07ab83f3e23b-kube-api-access-j2vbg\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014275 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d51ca87-b472-4aba-b566-af66d1c18028-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014286 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04e3b15e-4dde-441e-b414-07ab83f3e23b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014303 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hv62c\" (UniqueName: \"kubernetes.io/projected/6de166a4-30e1-41db-81c9-024c530a1da3-kube-api-access-hv62c\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014315 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mw2rn\" (UniqueName: \"kubernetes.io/projected/3e8835f4-45e5-441c-8039-29193087baca-kube-api-access-mw2rn\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014325 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e8835f4-45e5-441c-8039-29193087baca-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.014854 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d31bdc12-ed48-45e2-b990-2b098be82119-etc-swift\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: E1201 20:21:34.015031 4852 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 20:21:34 crc kubenswrapper[4852]: E1201 20:21:34.015054 4852 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 20:21:34 crc kubenswrapper[4852]: E1201 20:21:34.015314 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift podName:0eec4983-f32d-4858-a382-eacc49d726fd nodeName:}" failed. No retries permitted until 2025-12-01 20:21:35.015295808 +0000 UTC m=+1014.942377225 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift") pod "swift-storage-0" (UID: "0eec4983-f32d-4858-a382-eacc49d726fd") : configmap "swift-ring-files" not found Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.022806 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-scripts\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.022845 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-ring-data-devices\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.025805 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-swiftconf\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.027483 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-combined-ca-bundle\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.030505 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-dispersionconf\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.035421 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch96x\" (UniqueName: \"kubernetes.io/projected/d31bdc12-ed48-45e2-b990-2b098be82119-kube-api-access-ch96x\") pod \"swift-ring-rebalance-v8pzw\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.156139 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.345105 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f1cb-account-create-update-6rb5m" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.350240 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-1ce5-account-create-update-4nwzp" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.357043 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f1cb-account-create-update-6rb5m" event={"ID":"0d51ca87-b472-4aba-b566-af66d1c18028","Type":"ContainerDied","Data":"dbd47c9c29d2226af55e7166d93660937bbddafa46a9552bb510b06e60d3a86f"} Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.357102 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbd47c9c29d2226af55e7166d93660937bbddafa46a9552bb510b06e60d3a86f" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.357119 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1ce5-account-create-update-4nwzp" event={"ID":"3e8835f4-45e5-441c-8039-29193087baca","Type":"ContainerDied","Data":"079a07d55500fb9eb330aa32f316ed094e13fcbfc8a01fbfb45b5697c6a654af"} Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.357134 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="079a07d55500fb9eb330aa32f316ed094e13fcbfc8a01fbfb45b5697c6a654af" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.362010 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" event={"ID":"2e2d3b9c-bafa-4691-ac33-7852955ea9ad","Type":"ContainerStarted","Data":"a69edb9fd8ac1ecb1ab526427647c93f3fe6f820b2ecdfd5c40dbda53325f0d8"} Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.363138 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.366993 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7wvf6" event={"ID":"6de166a4-30e1-41db-81c9-024c530a1da3","Type":"ContainerDied","Data":"1a230c2860f445133eab61a21896bf032bce3be75d0595d5fd21bb6c214eee85"} Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.367024 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a230c2860f445133eab61a21896bf032bce3be75d0595d5fd21bb6c214eee85" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.367023 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7wvf6" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.369504 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tj7jd" event={"ID":"04e3b15e-4dde-441e-b414-07ab83f3e23b","Type":"ContainerDied","Data":"18345b29627ed1ed632b85deeb748f8a3aed003daae23d38d763667bc197ad97"} Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.369583 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18345b29627ed1ed632b85deeb748f8a3aed003daae23d38d763667bc197ad97" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.369692 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-tj7jd" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.638635 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" podStartSLOduration=2.638610668 podStartE2EDuration="2.638610668s" podCreationTimestamp="2025-12-01 20:21:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:21:34.390267899 +0000 UTC m=+1014.317349336" watchObservedRunningTime="2025-12-01 20:21:34.638610668 +0000 UTC m=+1014.565692095" Dec 01 20:21:34 crc kubenswrapper[4852]: I1201 20:21:34.647552 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-v8pzw"] Dec 01 20:21:34 crc kubenswrapper[4852]: W1201 20:21:34.654700 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd31bdc12_ed48_45e2_b990_2b098be82119.slice/crio-40aabcb9199e2c8e75a9796fe58d72499e95abaa7e1402a57fcd2c440511000d WatchSource:0}: Error finding container 40aabcb9199e2c8e75a9796fe58d72499e95abaa7e1402a57fcd2c440511000d: Status 404 returned error can't find the container with id 40aabcb9199e2c8e75a9796fe58d72499e95abaa7e1402a57fcd2c440511000d Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.037405 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:35 crc kubenswrapper[4852]: E1201 20:21:35.037694 4852 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 20:21:35 crc kubenswrapper[4852]: E1201 20:21:35.037736 4852 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 20:21:35 crc kubenswrapper[4852]: E1201 20:21:35.037827 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift podName:0eec4983-f32d-4858-a382-eacc49d726fd nodeName:}" failed. No retries permitted until 2025-12-01 20:21:37.037796123 +0000 UTC m=+1016.964877540 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift") pod "swift-storage-0" (UID: "0eec4983-f32d-4858-a382-eacc49d726fd") : configmap "swift-ring-files" not found Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.256646 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-6sc57"] Dec 01 20:21:35 crc kubenswrapper[4852]: E1201 20:21:35.257063 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d51ca87-b472-4aba-b566-af66d1c18028" containerName="mariadb-account-create-update" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.257087 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d51ca87-b472-4aba-b566-af66d1c18028" containerName="mariadb-account-create-update" Dec 01 20:21:35 crc kubenswrapper[4852]: E1201 20:21:35.257106 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e8835f4-45e5-441c-8039-29193087baca" containerName="mariadb-account-create-update" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.257117 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e8835f4-45e5-441c-8039-29193087baca" containerName="mariadb-account-create-update" Dec 01 20:21:35 crc kubenswrapper[4852]: E1201 20:21:35.257145 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04e3b15e-4dde-441e-b414-07ab83f3e23b" containerName="mariadb-database-create" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.257152 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="04e3b15e-4dde-441e-b414-07ab83f3e23b" containerName="mariadb-database-create" Dec 01 20:21:35 crc kubenswrapper[4852]: E1201 20:21:35.257163 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6de166a4-30e1-41db-81c9-024c530a1da3" containerName="mariadb-database-create" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.257170 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="6de166a4-30e1-41db-81c9-024c530a1da3" containerName="mariadb-database-create" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.257319 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="6de166a4-30e1-41db-81c9-024c530a1da3" containerName="mariadb-database-create" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.257338 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="04e3b15e-4dde-441e-b414-07ab83f3e23b" containerName="mariadb-database-create" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.257351 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d51ca87-b472-4aba-b566-af66d1c18028" containerName="mariadb-account-create-update" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.257366 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e8835f4-45e5-441c-8039-29193087baca" containerName="mariadb-account-create-update" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.260489 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-6sc57" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.270000 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-6sc57"] Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.339103 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-82f7-account-create-update-qbdms"] Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.340404 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-82f7-account-create-update-qbdms" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.342411 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b974096d-536c-40f0-962f-a8407408b4c6-operator-scripts\") pod \"glance-db-create-6sc57\" (UID: \"b974096d-536c-40f0-962f-a8407408b4c6\") " pod="openstack/glance-db-create-6sc57" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.342480 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq867\" (UniqueName: \"kubernetes.io/projected/b974096d-536c-40f0-962f-a8407408b4c6-kube-api-access-sq867\") pod \"glance-db-create-6sc57\" (UID: \"b974096d-536c-40f0-962f-a8407408b4c6\") " pod="openstack/glance-db-create-6sc57" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.348247 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.369302 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-82f7-account-create-update-qbdms"] Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.384057 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-v8pzw" event={"ID":"d31bdc12-ed48-45e2-b990-2b098be82119","Type":"ContainerStarted","Data":"40aabcb9199e2c8e75a9796fe58d72499e95abaa7e1402a57fcd2c440511000d"} Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.444130 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8da5478-7e0f-4af2-bb80-213f45977feb-operator-scripts\") pod \"glance-82f7-account-create-update-qbdms\" (UID: \"f8da5478-7e0f-4af2-bb80-213f45977feb\") " pod="openstack/glance-82f7-account-create-update-qbdms" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.444610 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb2h7\" (UniqueName: \"kubernetes.io/projected/f8da5478-7e0f-4af2-bb80-213f45977feb-kube-api-access-cb2h7\") pod \"glance-82f7-account-create-update-qbdms\" (UID: \"f8da5478-7e0f-4af2-bb80-213f45977feb\") " pod="openstack/glance-82f7-account-create-update-qbdms" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.444757 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b974096d-536c-40f0-962f-a8407408b4c6-operator-scripts\") pod \"glance-db-create-6sc57\" (UID: \"b974096d-536c-40f0-962f-a8407408b4c6\") " pod="openstack/glance-db-create-6sc57" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.448539 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/b974096d-536c-40f0-962f-a8407408b4c6-operator-scripts\") pod \"glance-db-create-6sc57\" (UID: \"b974096d-536c-40f0-962f-a8407408b4c6\") " pod="openstack/glance-db-create-6sc57" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.533320 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq867\" (UniqueName: \"kubernetes.io/projected/b974096d-536c-40f0-962f-a8407408b4c6-kube-api-access-sq867\") pod \"glance-db-create-6sc57\" (UID: \"b974096d-536c-40f0-962f-a8407408b4c6\") " pod="openstack/glance-db-create-6sc57" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.586585 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq867\" (UniqueName: \"kubernetes.io/projected/b974096d-536c-40f0-962f-a8407408b4c6-kube-api-access-sq867\") pod \"glance-db-create-6sc57\" (UID: \"b974096d-536c-40f0-962f-a8407408b4c6\") " pod="openstack/glance-db-create-6sc57" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.637621 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8da5478-7e0f-4af2-bb80-213f45977feb-operator-scripts\") pod \"glance-82f7-account-create-update-qbdms\" (UID: \"f8da5478-7e0f-4af2-bb80-213f45977feb\") " pod="openstack/glance-82f7-account-create-update-qbdms" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.636831 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8da5478-7e0f-4af2-bb80-213f45977feb-operator-scripts\") pod \"glance-82f7-account-create-update-qbdms\" (UID: \"f8da5478-7e0f-4af2-bb80-213f45977feb\") " pod="openstack/glance-82f7-account-create-update-qbdms" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.637709 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb2h7\" (UniqueName: \"kubernetes.io/projected/f8da5478-7e0f-4af2-bb80-213f45977feb-kube-api-access-cb2h7\") pod \"glance-82f7-account-create-update-qbdms\" (UID: \"f8da5478-7e0f-4af2-bb80-213f45977feb\") " pod="openstack/glance-82f7-account-create-update-qbdms" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.677636 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb2h7\" (UniqueName: \"kubernetes.io/projected/f8da5478-7e0f-4af2-bb80-213f45977feb-kube-api-access-cb2h7\") pod \"glance-82f7-account-create-update-qbdms\" (UID: \"f8da5478-7e0f-4af2-bb80-213f45977feb\") " pod="openstack/glance-82f7-account-create-update-qbdms" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.883162 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6sc57" Dec 01 20:21:35 crc kubenswrapper[4852]: I1201 20:21:35.959375 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-82f7-account-create-update-qbdms" Dec 01 20:21:36 crc kubenswrapper[4852]: I1201 20:21:36.390327 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-6sc57"] Dec 01 20:21:36 crc kubenswrapper[4852]: I1201 20:21:36.501135 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-82f7-account-create-update-qbdms"] Dec 01 20:21:36 crc kubenswrapper[4852]: W1201 20:21:36.509745 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8da5478_7e0f_4af2_bb80_213f45977feb.slice/crio-7edf976fe67ed7ea847e8ccc8e9b08ca1795d763eb13b1ab38dcbb17c5efc219 WatchSource:0}: Error finding container 7edf976fe67ed7ea847e8ccc8e9b08ca1795d763eb13b1ab38dcbb17c5efc219: Status 404 returned error can't find the container with id 7edf976fe67ed7ea847e8ccc8e9b08ca1795d763eb13b1ab38dcbb17c5efc219 Dec 01 20:21:37 crc kubenswrapper[4852]: I1201 20:21:37.075169 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:37 crc kubenswrapper[4852]: E1201 20:21:37.075413 4852 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 20:21:37 crc kubenswrapper[4852]: E1201 20:21:37.075927 4852 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 20:21:37 crc kubenswrapper[4852]: E1201 20:21:37.076038 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift podName:0eec4983-f32d-4858-a382-eacc49d726fd nodeName:}" failed. No retries permitted until 2025-12-01 20:21:41.075990501 +0000 UTC m=+1021.003071948 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift") pod "swift-storage-0" (UID: "0eec4983-f32d-4858-a382-eacc49d726fd") : configmap "swift-ring-files" not found Dec 01 20:21:37 crc kubenswrapper[4852]: I1201 20:21:37.403607 4852 generic.go:334] "Generic (PLEG): container finished" podID="f8da5478-7e0f-4af2-bb80-213f45977feb" containerID="0b6a57b185a79d4a759e2f3b397ebdc56624d2b6dcf863e8134f0416157edf9c" exitCode=0 Dec 01 20:21:37 crc kubenswrapper[4852]: I1201 20:21:37.403707 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-82f7-account-create-update-qbdms" event={"ID":"f8da5478-7e0f-4af2-bb80-213f45977feb","Type":"ContainerDied","Data":"0b6a57b185a79d4a759e2f3b397ebdc56624d2b6dcf863e8134f0416157edf9c"} Dec 01 20:21:37 crc kubenswrapper[4852]: I1201 20:21:37.403746 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-82f7-account-create-update-qbdms" event={"ID":"f8da5478-7e0f-4af2-bb80-213f45977feb","Type":"ContainerStarted","Data":"7edf976fe67ed7ea847e8ccc8e9b08ca1795d763eb13b1ab38dcbb17c5efc219"} Dec 01 20:21:37 crc kubenswrapper[4852]: I1201 20:21:37.413512 4852 generic.go:334] "Generic (PLEG): container finished" podID="b974096d-536c-40f0-962f-a8407408b4c6" containerID="bda90d6ef5d7836105083c869def2442f47d8c99617ddf355fa1dd5a2bb95e11" exitCode=0 Dec 01 20:21:37 crc kubenswrapper[4852]: I1201 20:21:37.413547 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6sc57" event={"ID":"b974096d-536c-40f0-962f-a8407408b4c6","Type":"ContainerDied","Data":"bda90d6ef5d7836105083c869def2442f47d8c99617ddf355fa1dd5a2bb95e11"} Dec 01 20:21:37 crc kubenswrapper[4852]: I1201 20:21:37.413566 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6sc57" event={"ID":"b974096d-536c-40f0-962f-a8407408b4c6","Type":"ContainerStarted","Data":"b7718c2a8151cb47bdb7302e2c0646fe0c257db8e380e545d6187328a53c9193"} Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.115430 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6sc57" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.167956 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-82f7-account-create-update-qbdms" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.231580 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8da5478-7e0f-4af2-bb80-213f45977feb-operator-scripts\") pod \"f8da5478-7e0f-4af2-bb80-213f45977feb\" (UID: \"f8da5478-7e0f-4af2-bb80-213f45977feb\") " Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.231662 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cb2h7\" (UniqueName: \"kubernetes.io/projected/f8da5478-7e0f-4af2-bb80-213f45977feb-kube-api-access-cb2h7\") pod \"f8da5478-7e0f-4af2-bb80-213f45977feb\" (UID: \"f8da5478-7e0f-4af2-bb80-213f45977feb\") " Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.231794 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b974096d-536c-40f0-962f-a8407408b4c6-operator-scripts\") pod \"b974096d-536c-40f0-962f-a8407408b4c6\" (UID: \"b974096d-536c-40f0-962f-a8407408b4c6\") " Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.231833 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq867\" (UniqueName: \"kubernetes.io/projected/b974096d-536c-40f0-962f-a8407408b4c6-kube-api-access-sq867\") pod \"b974096d-536c-40f0-962f-a8407408b4c6\" (UID: \"b974096d-536c-40f0-962f-a8407408b4c6\") " Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.232502 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8da5478-7e0f-4af2-bb80-213f45977feb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f8da5478-7e0f-4af2-bb80-213f45977feb" (UID: "f8da5478-7e0f-4af2-bb80-213f45977feb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.232852 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b974096d-536c-40f0-962f-a8407408b4c6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b974096d-536c-40f0-962f-a8407408b4c6" (UID: "b974096d-536c-40f0-962f-a8407408b4c6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.237888 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8da5478-7e0f-4af2-bb80-213f45977feb-kube-api-access-cb2h7" (OuterVolumeSpecName: "kube-api-access-cb2h7") pod "f8da5478-7e0f-4af2-bb80-213f45977feb" (UID: "f8da5478-7e0f-4af2-bb80-213f45977feb"). InnerVolumeSpecName "kube-api-access-cb2h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.237959 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b974096d-536c-40f0-962f-a8407408b4c6-kube-api-access-sq867" (OuterVolumeSpecName: "kube-api-access-sq867") pod "b974096d-536c-40f0-962f-a8407408b4c6" (UID: "b974096d-536c-40f0-962f-a8407408b4c6"). InnerVolumeSpecName "kube-api-access-sq867". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.341280 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b974096d-536c-40f0-962f-a8407408b4c6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.341333 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq867\" (UniqueName: \"kubernetes.io/projected/b974096d-536c-40f0-962f-a8407408b4c6-kube-api-access-sq867\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.341358 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8da5478-7e0f-4af2-bb80-213f45977feb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.341372 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cb2h7\" (UniqueName: \"kubernetes.io/projected/f8da5478-7e0f-4af2-bb80-213f45977feb-kube-api-access-cb2h7\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.473430 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6sc57" event={"ID":"b974096d-536c-40f0-962f-a8407408b4c6","Type":"ContainerDied","Data":"b7718c2a8151cb47bdb7302e2c0646fe0c257db8e380e545d6187328a53c9193"} Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.473538 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7718c2a8151cb47bdb7302e2c0646fe0c257db8e380e545d6187328a53c9193" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.473612 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6sc57" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.476303 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-82f7-account-create-update-qbdms" event={"ID":"f8da5478-7e0f-4af2-bb80-213f45977feb","Type":"ContainerDied","Data":"7edf976fe67ed7ea847e8ccc8e9b08ca1795d763eb13b1ab38dcbb17c5efc219"} Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.476369 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7edf976fe67ed7ea847e8ccc8e9b08ca1795d763eb13b1ab38dcbb17c5efc219" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.476402 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-82f7-account-create-update-qbdms" Dec 01 20:21:39 crc kubenswrapper[4852]: I1201 20:21:39.811581 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.487847 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-v8pzw" event={"ID":"d31bdc12-ed48-45e2-b990-2b098be82119","Type":"ContainerStarted","Data":"5401eefcb7269bf2ca2005e138686baedc6007351a423a4026b4a64de6b53d29"} Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.500759 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-rl4kg"] Dec 01 20:21:40 crc kubenswrapper[4852]: E1201 20:21:40.501565 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8da5478-7e0f-4af2-bb80-213f45977feb" containerName="mariadb-account-create-update" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.501657 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8da5478-7e0f-4af2-bb80-213f45977feb" containerName="mariadb-account-create-update" Dec 01 20:21:40 crc kubenswrapper[4852]: E1201 20:21:40.501742 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b974096d-536c-40f0-962f-a8407408b4c6" containerName="mariadb-database-create" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.501793 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b974096d-536c-40f0-962f-a8407408b4c6" containerName="mariadb-database-create" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.502036 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="b974096d-536c-40f0-962f-a8407408b4c6" containerName="mariadb-database-create" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.502111 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8da5478-7e0f-4af2-bb80-213f45977feb" containerName="mariadb-account-create-update" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.502853 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.505029 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-h82jn" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.505826 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.525554 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-v8pzw" podStartSLOduration=3.163462951 podStartE2EDuration="7.525531334s" podCreationTimestamp="2025-12-01 20:21:33 +0000 UTC" firstStartedPulling="2025-12-01 20:21:34.658084053 +0000 UTC m=+1014.585165470" lastFinishedPulling="2025-12-01 20:21:39.020152416 +0000 UTC m=+1018.947233853" observedRunningTime="2025-12-01 20:21:40.519017467 +0000 UTC m=+1020.446098884" watchObservedRunningTime="2025-12-01 20:21:40.525531334 +0000 UTC m=+1020.452612741" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.528953 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rl4kg"] Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.666191 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-config-data\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.666265 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzfqj\" (UniqueName: \"kubernetes.io/projected/5c546551-9ac1-4462-a439-3a63b69d678d-kube-api-access-tzfqj\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.666403 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-db-sync-config-data\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.666509 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-combined-ca-bundle\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.768313 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-config-data\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.768376 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzfqj\" (UniqueName: \"kubernetes.io/projected/5c546551-9ac1-4462-a439-3a63b69d678d-kube-api-access-tzfqj\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc 
kubenswrapper[4852]: I1201 20:21:40.768464 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-db-sync-config-data\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.768514 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-combined-ca-bundle\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.775917 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-db-sync-config-data\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.776055 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-config-data\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.776081 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-combined-ca-bundle\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.792313 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzfqj\" (UniqueName: \"kubernetes.io/projected/5c546551-9ac1-4462-a439-3a63b69d678d-kube-api-access-tzfqj\") pod \"glance-db-sync-rl4kg\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:40 crc kubenswrapper[4852]: I1201 20:21:40.821248 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rl4kg" Dec 01 20:21:41 crc kubenswrapper[4852]: I1201 20:21:41.184131 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:41 crc kubenswrapper[4852]: E1201 20:21:41.186794 4852 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 20:21:41 crc kubenswrapper[4852]: E1201 20:21:41.186844 4852 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 20:21:41 crc kubenswrapper[4852]: E1201 20:21:41.186978 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift podName:0eec4983-f32d-4858-a382-eacc49d726fd nodeName:}" failed. No retries permitted until 2025-12-01 20:21:49.186897407 +0000 UTC m=+1029.113978824 (durationBeforeRetry 8s). 
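The mount failure recorded above is a dependency gap rather than an I/O error: the projected volume etc-swift on swift-storage-0 sources the ConfigMap openstack/swift-ring-files, which does not exist yet, so the kubelet parks the operation and schedules a retry 8 seconds out. The swift-ring-rebalance run that appears to publish those ring files finishes at 20:21:48, and the retried MountVolume.SetUp succeeds at 20:21:49 below. A minimal client-go sketch for checking that dependency from outside the node, assuming a reachable kubeconfig; the namespace and ConfigMap name are taken from the log, everything else is illustrative:

package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Load the default kubeconfig; illustrative, not how the kubelet does it.
	kubeconfig := filepath.Join(os.Getenv("HOME"), ".kube", "config")
	cfg, err := clientcmd.BuildConfigFromFlags("", kubeconfig)
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// The projected volume "etc-swift" on swift-storage-0 sources this
	// ConfigMap; until it exists, every MountVolume.SetUp attempt fails.
	_, err = client.CoreV1().ConfigMaps("openstack").Get(
		context.TODO(), "swift-ring-files", metav1.GetOptions{})
	switch {
	case apierrors.IsNotFound(err):
		fmt.Println("swift-ring-files not published yet; the kubelet keeps retrying")
	case err != nil:
		panic(err)
	default:
		fmt.Println("swift-ring-files exists; the etc-swift mount can proceed")
	}
}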
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift") pod "swift-storage-0" (UID: "0eec4983-f32d-4858-a382-eacc49d726fd") : configmap "swift-ring-files" not found Dec 01 20:21:41 crc kubenswrapper[4852]: I1201 20:21:41.436573 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rl4kg"] Dec 01 20:21:41 crc kubenswrapper[4852]: I1201 20:21:41.504750 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rl4kg" event={"ID":"5c546551-9ac1-4462-a439-3a63b69d678d","Type":"ContainerStarted","Data":"7fb54bcdf649e5a61d196f16d05671d33758f3b3f7f0c96c777933277471b6eb"} Dec 01 20:21:42 crc kubenswrapper[4852]: I1201 20:21:42.509849 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:21:42 crc kubenswrapper[4852]: I1201 20:21:42.580836 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-5m8sc"] Dec 01 20:21:42 crc kubenswrapper[4852]: I1201 20:21:42.581220 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" podUID="fa303a3a-fc0e-46e6-a125-4645d6de74f7" containerName="dnsmasq-dns" containerID="cri-o://a3fe81fac77606227c96de8aa45f638ae053ebb10b52870b288c4f493f10b584" gracePeriod=10 Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.530884 4852 generic.go:334] "Generic (PLEG): container finished" podID="fa303a3a-fc0e-46e6-a125-4645d6de74f7" containerID="a3fe81fac77606227c96de8aa45f638ae053ebb10b52870b288c4f493f10b584" exitCode=0 Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.531399 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" event={"ID":"fa303a3a-fc0e-46e6-a125-4645d6de74f7","Type":"ContainerDied","Data":"a3fe81fac77606227c96de8aa45f638ae053ebb10b52870b288c4f493f10b584"} Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.652766 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.746535 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-sb\") pod \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.748271 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-nb\") pod \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.748309 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hht2v\" (UniqueName: \"kubernetes.io/projected/fa303a3a-fc0e-46e6-a125-4645d6de74f7-kube-api-access-hht2v\") pod \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.748367 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-config\") pod \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.748431 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-dns-svc\") pod \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\" (UID: \"fa303a3a-fc0e-46e6-a125-4645d6de74f7\") " Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.755936 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa303a3a-fc0e-46e6-a125-4645d6de74f7-kube-api-access-hht2v" (OuterVolumeSpecName: "kube-api-access-hht2v") pod "fa303a3a-fc0e-46e6-a125-4645d6de74f7" (UID: "fa303a3a-fc0e-46e6-a125-4645d6de74f7"). InnerVolumeSpecName "kube-api-access-hht2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.793315 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fa303a3a-fc0e-46e6-a125-4645d6de74f7" (UID: "fa303a3a-fc0e-46e6-a125-4645d6de74f7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.801898 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fa303a3a-fc0e-46e6-a125-4645d6de74f7" (UID: "fa303a3a-fc0e-46e6-a125-4645d6de74f7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.806010 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fa303a3a-fc0e-46e6-a125-4645d6de74f7" (UID: "fa303a3a-fc0e-46e6-a125-4645d6de74f7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.811392 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-config" (OuterVolumeSpecName: "config") pod "fa303a3a-fc0e-46e6-a125-4645d6de74f7" (UID: "fa303a3a-fc0e-46e6-a125-4645d6de74f7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.854813 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.854857 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.854868 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hht2v\" (UniqueName: \"kubernetes.io/projected/fa303a3a-fc0e-46e6-a125-4645d6de74f7-kube-api-access-hht2v\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.854880 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:43 crc kubenswrapper[4852]: I1201 20:21:43.854891 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa303a3a-fc0e-46e6-a125-4645d6de74f7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:44 crc kubenswrapper[4852]: I1201 20:21:44.567407 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" event={"ID":"fa303a3a-fc0e-46e6-a125-4645d6de74f7","Type":"ContainerDied","Data":"6a2db48c96d11afa54eee483260d902c98c28e36b967d8395a002043e5d0c55d"} Dec 01 20:21:44 crc kubenswrapper[4852]: I1201 20:21:44.567495 4852 scope.go:117] "RemoveContainer" containerID="a3fe81fac77606227c96de8aa45f638ae053ebb10b52870b288c4f493f10b584" Dec 01 20:21:44 crc kubenswrapper[4852]: I1201 20:21:44.567709 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58bd875f97-5m8sc" Dec 01 20:21:44 crc kubenswrapper[4852]: I1201 20:21:44.682576 4852 scope.go:117] "RemoveContainer" containerID="86119d281e16e66ae796ba4cb23a6a848848ad6adea5124eb0e32b4b1afb56c8" Dec 01 20:21:44 crc kubenswrapper[4852]: I1201 20:21:44.686011 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-5m8sc"] Dec 01 20:21:44 crc kubenswrapper[4852]: I1201 20:21:44.699279 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-5m8sc"] Dec 01 20:21:45 crc kubenswrapper[4852]: I1201 20:21:45.334332 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8kwmk" podUID="5ef29299-3043-4921-b77b-07416d89ed96" containerName="ovn-controller" probeResult="failure" output=< Dec 01 20:21:45 crc kubenswrapper[4852]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 20:21:45 crc kubenswrapper[4852]: > Dec 01 20:21:46 crc kubenswrapper[4852]: I1201 20:21:46.334596 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa303a3a-fc0e-46e6-a125-4645d6de74f7" path="/var/lib/kubelet/pods/fa303a3a-fc0e-46e6-a125-4645d6de74f7/volumes" Dec 01 20:21:48 crc kubenswrapper[4852]: I1201 20:21:48.611343 4852 generic.go:334] "Generic (PLEG): container finished" podID="ff0aa0ab-3c85-4b10-a2c4-a680086db344" containerID="ee7c14abc615f6557fff5cc024b7db4e9b29feed38c8bee4c32bce0f856b83fd" exitCode=0 Dec 01 20:21:48 crc kubenswrapper[4852]: I1201 20:21:48.611977 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff0aa0ab-3c85-4b10-a2c4-a680086db344","Type":"ContainerDied","Data":"ee7c14abc615f6557fff5cc024b7db4e9b29feed38c8bee4c32bce0f856b83fd"} Dec 01 20:21:48 crc kubenswrapper[4852]: I1201 20:21:48.616240 4852 generic.go:334] "Generic (PLEG): container finished" podID="d31bdc12-ed48-45e2-b990-2b098be82119" containerID="5401eefcb7269bf2ca2005e138686baedc6007351a423a4026b4a64de6b53d29" exitCode=0 Dec 01 20:21:48 crc kubenswrapper[4852]: I1201 20:21:48.616321 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-v8pzw" event={"ID":"d31bdc12-ed48-45e2-b990-2b098be82119","Type":"ContainerDied","Data":"5401eefcb7269bf2ca2005e138686baedc6007351a423a4026b4a64de6b53d29"} Dec 01 20:21:48 crc kubenswrapper[4852]: I1201 20:21:48.621895 4852 generic.go:334] "Generic (PLEG): container finished" podID="db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" containerID="91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5" exitCode=0 Dec 01 20:21:48 crc kubenswrapper[4852]: I1201 20:21:48.621937 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca","Type":"ContainerDied","Data":"91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5"} Dec 01 20:21:49 crc kubenswrapper[4852]: I1201 20:21:49.271087 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift\") pod \"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:49 crc kubenswrapper[4852]: I1201 20:21:49.291777 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0eec4983-f32d-4858-a382-eacc49d726fd-etc-swift\") pod 
\"swift-storage-0\" (UID: \"0eec4983-f32d-4858-a382-eacc49d726fd\") " pod="openstack/swift-storage-0" Dec 01 20:21:49 crc kubenswrapper[4852]: I1201 20:21:49.547073 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.322444 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8kwmk" podUID="5ef29299-3043-4921-b77b-07416d89ed96" containerName="ovn-controller" probeResult="failure" output=< Dec 01 20:21:50 crc kubenswrapper[4852]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 20:21:50 crc kubenswrapper[4852]: > Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.366647 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.372295 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-rwcdk" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.618257 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8kwmk-config-hcdwt"] Dec 01 20:21:50 crc kubenswrapper[4852]: E1201 20:21:50.618721 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa303a3a-fc0e-46e6-a125-4645d6de74f7" containerName="init" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.618735 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa303a3a-fc0e-46e6-a125-4645d6de74f7" containerName="init" Dec 01 20:21:50 crc kubenswrapper[4852]: E1201 20:21:50.618747 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa303a3a-fc0e-46e6-a125-4645d6de74f7" containerName="dnsmasq-dns" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.618753 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa303a3a-fc0e-46e6-a125-4645d6de74f7" containerName="dnsmasq-dns" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.619126 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa303a3a-fc0e-46e6-a125-4645d6de74f7" containerName="dnsmasq-dns" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.619875 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.622872 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.634467 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8kwmk-config-hcdwt"] Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.812531 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-scripts\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.812640 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-log-ovn\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.812692 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fxcr\" (UniqueName: \"kubernetes.io/projected/0c4de746-0eba-4576-b0d4-086654552c8b-kube-api-access-2fxcr\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.812789 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-additional-scripts\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.812890 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.812969 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run-ovn\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.914661 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.915118 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run\") pod 
\"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.915214 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run-ovn\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.915247 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-scripts\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.915287 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-log-ovn\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.915318 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fxcr\" (UniqueName: \"kubernetes.io/projected/0c4de746-0eba-4576-b0d4-086654552c8b-kube-api-access-2fxcr\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.915360 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-additional-scripts\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.916249 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-additional-scripts\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.916317 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run-ovn\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.916722 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-log-ovn\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.921673 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-scripts\") pod 
\"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.940394 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fxcr\" (UniqueName: \"kubernetes.io/projected/0c4de746-0eba-4576-b0d4-086654552c8b-kube-api-access-2fxcr\") pod \"ovn-controller-8kwmk-config-hcdwt\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:50 crc kubenswrapper[4852]: I1201 20:21:50.947206 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.358946 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8kwmk" podUID="5ef29299-3043-4921-b77b-07416d89ed96" containerName="ovn-controller" probeResult="failure" output=< Dec 01 20:21:55 crc kubenswrapper[4852]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 20:21:55 crc kubenswrapper[4852]: > Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.588886 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.740680 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-scripts\") pod \"d31bdc12-ed48-45e2-b990-2b098be82119\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.741064 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-ring-data-devices\") pod \"d31bdc12-ed48-45e2-b990-2b098be82119\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.741168 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-swiftconf\") pod \"d31bdc12-ed48-45e2-b990-2b098be82119\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.741193 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-combined-ca-bundle\") pod \"d31bdc12-ed48-45e2-b990-2b098be82119\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.741225 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-dispersionconf\") pod \"d31bdc12-ed48-45e2-b990-2b098be82119\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.741339 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch96x\" (UniqueName: \"kubernetes.io/projected/d31bdc12-ed48-45e2-b990-2b098be82119-kube-api-access-ch96x\") pod \"d31bdc12-ed48-45e2-b990-2b098be82119\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.741386 4852 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d31bdc12-ed48-45e2-b990-2b098be82119-etc-swift\") pod \"d31bdc12-ed48-45e2-b990-2b098be82119\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.743356 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "d31bdc12-ed48-45e2-b990-2b098be82119" (UID: "d31bdc12-ed48-45e2-b990-2b098be82119"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.750870 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d31bdc12-ed48-45e2-b990-2b098be82119-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d31bdc12-ed48-45e2-b990-2b098be82119" (UID: "d31bdc12-ed48-45e2-b990-2b098be82119"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.775395 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d31bdc12-ed48-45e2-b990-2b098be82119-kube-api-access-ch96x" (OuterVolumeSpecName: "kube-api-access-ch96x") pod "d31bdc12-ed48-45e2-b990-2b098be82119" (UID: "d31bdc12-ed48-45e2-b990-2b098be82119"). InnerVolumeSpecName "kube-api-access-ch96x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.808400 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-v8pzw" event={"ID":"d31bdc12-ed48-45e2-b990-2b098be82119","Type":"ContainerDied","Data":"40aabcb9199e2c8e75a9796fe58d72499e95abaa7e1402a57fcd2c440511000d"} Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.808471 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40aabcb9199e2c8e75a9796fe58d72499e95abaa7e1402a57fcd2c440511000d" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.808554 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-v8pzw" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.819803 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "d31bdc12-ed48-45e2-b990-2b098be82119" (UID: "d31bdc12-ed48-45e2-b990-2b098be82119"). InnerVolumeSpecName "dispersionconf". 
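PLEG records such as the ContainerDied event for swift-ring-rebalance-v8pzw above keep a stable key=value shape, which makes this log easy to mine mechanically. A stdlib-only sketch that extracts the event type, pod, and container ID from SyncLoop (PLEG) lines; the regular expression is fitted to the lines in this excerpt, not to any documented format:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches PLEG event lines as they appear in this log, e.g.
//   "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-v8pzw"
//   event={"ID":"d31bdc12-...","Type":"ContainerDied","Data":"40aabcb9..."}
var plegRe = regexp.MustCompile(
	`"SyncLoop \(PLEG\): event for pod" pod="([^"]+)" event=\{"ID":"([^"]+)","Type":"([^"]+)","Data":"([^"]+)"\}`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet lines run long
	for sc.Scan() {
		if m := plegRe.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Printf("%-18s pod=%-45s container=%.12s\n", m[3], m[1], m[4])
		}
	}
}

Fed this kubelet.log on stdin (go run pleg.go < kubelet.log), it prints one ContainerStarted or ContainerDied row per event.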
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.843225 4852 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.843254 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch96x\" (UniqueName: \"kubernetes.io/projected/d31bdc12-ed48-45e2-b990-2b098be82119-kube-api-access-ch96x\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.843264 4852 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d31bdc12-ed48-45e2-b990-2b098be82119-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.843273 4852 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.879704 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "d31bdc12-ed48-45e2-b990-2b098be82119" (UID: "d31bdc12-ed48-45e2-b990-2b098be82119"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:21:55 crc kubenswrapper[4852]: E1201 20:21:55.880415 4852 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-combined-ca-bundle podName:d31bdc12-ed48-45e2-b990-2b098be82119 nodeName:}" failed. No retries permitted until 2025-12-01 20:21:56.380364363 +0000 UTC m=+1036.307445780 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-combined-ca-bundle") pod "d31bdc12-ed48-45e2-b990-2b098be82119" (UID: "d31bdc12-ed48-45e2-b990-2b098be82119") : error deleting /var/lib/kubelet/pods/d31bdc12-ed48-45e2-b990-2b098be82119/volume-subpaths: remove /var/lib/kubelet/pods/d31bdc12-ed48-45e2-b990-2b098be82119/volume-subpaths: no such file or directory Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.880996 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-scripts" (OuterVolumeSpecName: "scripts") pod "d31bdc12-ed48-45e2-b990-2b098be82119" (UID: "d31bdc12-ed48-45e2-b990-2b098be82119"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.945005 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d31bdc12-ed48-45e2-b990-2b098be82119-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.945045 4852 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:55 crc kubenswrapper[4852]: I1201 20:21:55.989837 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.168998 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8kwmk-config-hcdwt"] Dec 01 20:21:56 crc kubenswrapper[4852]: W1201 20:21:56.173331 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c4de746_0eba_4576_b0d4_086654552c8b.slice/crio-30b09886f0f07cc742b5232c849ce2344ef9ab740b808f723f4e81b35b743115 WatchSource:0}: Error finding container 30b09886f0f07cc742b5232c849ce2344ef9ab740b808f723f4e81b35b743115: Status 404 returned error can't find the container with id 30b09886f0f07cc742b5232c849ce2344ef9ab740b808f723f4e81b35b743115 Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.453168 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-combined-ca-bundle\") pod \"d31bdc12-ed48-45e2-b990-2b098be82119\" (UID: \"d31bdc12-ed48-45e2-b990-2b098be82119\") " Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.461130 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d31bdc12-ed48-45e2-b990-2b098be82119" (UID: "d31bdc12-ed48-45e2-b990-2b098be82119"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.555830 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d31bdc12-ed48-45e2-b990-2b098be82119-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.821383 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rl4kg" event={"ID":"5c546551-9ac1-4462-a439-3a63b69d678d","Type":"ContainerStarted","Data":"02eafd83064cd0c2af3487eadab0e5997ec94c218567f5bd0a9aacf742161f69"} Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.825048 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8kwmk-config-hcdwt" event={"ID":"0c4de746-0eba-4576-b0d4-086654552c8b","Type":"ContainerStarted","Data":"ba8bed2cd04a2735dd513c523a66c1c0e173664b72404bbfce476806f8aebcf6"} Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.825086 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8kwmk-config-hcdwt" event={"ID":"0c4de746-0eba-4576-b0d4-086654552c8b","Type":"ContainerStarted","Data":"30b09886f0f07cc742b5232c849ce2344ef9ab740b808f723f4e81b35b743115"} Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.828161 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca","Type":"ContainerStarted","Data":"774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb"} Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.828433 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.833414 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"1a04653df0bcc7214618e9e910d25b600e203b9dcd47b75f590ecdabce8fd669"} Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.835854 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff0aa0ab-3c85-4b10-a2c4-a680086db344","Type":"ContainerStarted","Data":"93c507c659599d436d50bcc26bd42033cddc5958ea2eddf7768e09be98c42fb9"} Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.836342 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.844939 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-rl4kg" podStartSLOduration=2.612649168 podStartE2EDuration="16.844909848s" podCreationTimestamp="2025-12-01 20:21:40 +0000 UTC" firstStartedPulling="2025-12-01 20:21:41.441682199 +0000 UTC m=+1021.368763616" lastFinishedPulling="2025-12-01 20:21:55.673942879 +0000 UTC m=+1035.601024296" observedRunningTime="2025-12-01 20:21:56.840655573 +0000 UTC m=+1036.767736980" watchObservedRunningTime="2025-12-01 20:21:56.844909848 +0000 UTC m=+1036.771991265" Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.864374 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-8kwmk-config-hcdwt" podStartSLOduration=6.864352812 podStartE2EDuration="6.864352812s" podCreationTimestamp="2025-12-01 20:21:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2025-12-01 20:21:56.854389487 +0000 UTC m=+1036.781470914" watchObservedRunningTime="2025-12-01 20:21:56.864352812 +0000 UTC m=+1036.791434229" Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.897849 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=59.110731803 podStartE2EDuration="1m11.89782918s" podCreationTimestamp="2025-12-01 20:20:45 +0000 UTC" firstStartedPulling="2025-12-01 20:21:00.776424343 +0000 UTC m=+980.703505780" lastFinishedPulling="2025-12-01 20:21:13.56352173 +0000 UTC m=+993.490603157" observedRunningTime="2025-12-01 20:21:56.895000001 +0000 UTC m=+1036.822081418" watchObservedRunningTime="2025-12-01 20:21:56.89782918 +0000 UTC m=+1036.824910597" Dec 01 20:21:56 crc kubenswrapper[4852]: I1201 20:21:56.925434 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=61.39365898 podStartE2EDuration="1m12.925407302s" podCreationTimestamp="2025-12-01 20:20:44 +0000 UTC" firstStartedPulling="2025-12-01 20:21:01.489069356 +0000 UTC m=+981.416150773" lastFinishedPulling="2025-12-01 20:21:13.020817678 +0000 UTC m=+992.947899095" observedRunningTime="2025-12-01 20:21:56.92122848 +0000 UTC m=+1036.848309897" watchObservedRunningTime="2025-12-01 20:21:56.925407302 +0000 UTC m=+1036.852488719" Dec 01 20:21:57 crc kubenswrapper[4852]: I1201 20:21:57.847214 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"4fe8d3be122d1b2f7ada942f7b26354b60c86a067168362d2764b697f164d4a4"} Dec 01 20:21:57 crc kubenswrapper[4852]: I1201 20:21:57.848102 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"55f0e2e186a9b5bfaee3822709dae87ffa0f3abe5dbc8adebe27f2a701b9a29b"} Dec 01 20:21:57 crc kubenswrapper[4852]: I1201 20:21:57.850619 4852 generic.go:334] "Generic (PLEG): container finished" podID="0c4de746-0eba-4576-b0d4-086654552c8b" containerID="ba8bed2cd04a2735dd513c523a66c1c0e173664b72404bbfce476806f8aebcf6" exitCode=0 Dec 01 20:21:57 crc kubenswrapper[4852]: I1201 20:21:57.850742 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8kwmk-config-hcdwt" event={"ID":"0c4de746-0eba-4576-b0d4-086654552c8b","Type":"ContainerDied","Data":"ba8bed2cd04a2735dd513c523a66c1c0e173664b72404bbfce476806f8aebcf6"} Dec 01 20:21:58 crc kubenswrapper[4852]: I1201 20:21:58.890116 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"7a42bb3e45cde8b5e90a11613ab4614505005d05464f327cb2933792c1e48bdd"} Dec 01 20:21:58 crc kubenswrapper[4852]: I1201 20:21:58.890547 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"ab16f6e6d140a3082927f725661a5906956e7e7e61ef19c6aa9192df506402fa"} Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.488532 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.612144 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-log-ovn\") pod \"0c4de746-0eba-4576-b0d4-086654552c8b\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.612225 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run-ovn\") pod \"0c4de746-0eba-4576-b0d4-086654552c8b\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.612285 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run\") pod \"0c4de746-0eba-4576-b0d4-086654552c8b\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.612329 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-additional-scripts\") pod \"0c4de746-0eba-4576-b0d4-086654552c8b\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.612324 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "0c4de746-0eba-4576-b0d4-086654552c8b" (UID: "0c4de746-0eba-4576-b0d4-086654552c8b"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.612366 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run" (OuterVolumeSpecName: "var-run") pod "0c4de746-0eba-4576-b0d4-086654552c8b" (UID: "0c4de746-0eba-4576-b0d4-086654552c8b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.612350 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "0c4de746-0eba-4576-b0d4-086654552c8b" (UID: "0c4de746-0eba-4576-b0d4-086654552c8b"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.612415 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-scripts\") pod \"0c4de746-0eba-4576-b0d4-086654552c8b\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.612491 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fxcr\" (UniqueName: \"kubernetes.io/projected/0c4de746-0eba-4576-b0d4-086654552c8b-kube-api-access-2fxcr\") pod \"0c4de746-0eba-4576-b0d4-086654552c8b\" (UID: \"0c4de746-0eba-4576-b0d4-086654552c8b\") " Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.613004 4852 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.613031 4852 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.613047 4852 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0c4de746-0eba-4576-b0d4-086654552c8b-var-run\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.613369 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "0c4de746-0eba-4576-b0d4-086654552c8b" (UID: "0c4de746-0eba-4576-b0d4-086654552c8b"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.613527 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-scripts" (OuterVolumeSpecName: "scripts") pod "0c4de746-0eba-4576-b0d4-086654552c8b" (UID: "0c4de746-0eba-4576-b0d4-086654552c8b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.621863 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c4de746-0eba-4576-b0d4-086654552c8b-kube-api-access-2fxcr" (OuterVolumeSpecName: "kube-api-access-2fxcr") pod "0c4de746-0eba-4576-b0d4-086654552c8b" (UID: "0c4de746-0eba-4576-b0d4-086654552c8b"). InnerVolumeSpecName "kube-api-access-2fxcr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.716757 4852 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.717182 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0c4de746-0eba-4576-b0d4-086654552c8b-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.717193 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fxcr\" (UniqueName: \"kubernetes.io/projected/0c4de746-0eba-4576-b0d4-086654552c8b-kube-api-access-2fxcr\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.906077 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8kwmk-config-hcdwt" event={"ID":"0c4de746-0eba-4576-b0d4-086654552c8b","Type":"ContainerDied","Data":"30b09886f0f07cc742b5232c849ce2344ef9ab740b808f723f4e81b35b743115"} Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.906131 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30b09886f0f07cc742b5232c849ce2344ef9ab740b808f723f4e81b35b743115" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.906146 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8kwmk-config-hcdwt" Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.911317 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"031ff42c212eab3fefbe192a633a066c729f81d0e758ba1c3be251e1a1243ce5"} Dec 01 20:21:59 crc kubenswrapper[4852]: I1201 20:21:59.911356 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"6d6aa7fc90387cf91b57012e401cb1e3a5e508cae20b0788aa86cd93fbf58b9b"} Dec 01 20:22:00 crc kubenswrapper[4852]: I1201 20:22:00.340189 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-8kwmk" Dec 01 20:22:00 crc kubenswrapper[4852]: I1201 20:22:00.617643 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8kwmk-config-hcdwt"] Dec 01 20:22:00 crc kubenswrapper[4852]: I1201 20:22:00.625834 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-8kwmk-config-hcdwt"] Dec 01 20:22:00 crc kubenswrapper[4852]: I1201 20:22:00.924394 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"bccd5a3f8b2bfb0b3af277ca39cec5b0dba752c858bf8153e7a8271f28cfd2b9"} Dec 01 20:22:00 crc kubenswrapper[4852]: I1201 20:22:00.924484 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"a4e56999fd3d0693e33a081e9794e86a01997fe595ab9d143692826f99e11e5f"} Dec 01 20:22:02 crc kubenswrapper[4852]: I1201 20:22:02.331874 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c4de746-0eba-4576-b0d4-086654552c8b" 
path="/var/lib/kubelet/pods/0c4de746-0eba-4576-b0d4-086654552c8b/volumes" Dec 01 20:22:02 crc kubenswrapper[4852]: I1201 20:22:02.948173 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"dbd5862bf9901474ec25d40593181f79ba9d9073a865d1cd0414dae0017f992b"} Dec 01 20:22:02 crc kubenswrapper[4852]: I1201 20:22:02.948568 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"445ed26639176feddd65a1c1b000a3cc024dd4c60a42c943847c474273f0a0e6"} Dec 01 20:22:02 crc kubenswrapper[4852]: I1201 20:22:02.948650 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"f71e04d21c3fb7fc76a33305c61a8f024f861b078d319301050116fa901ba445"} Dec 01 20:22:02 crc kubenswrapper[4852]: I1201 20:22:02.948711 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"ef4e72793711dfca04cc4d5936e40ddde472b1f59cfd919d5ffe4e5233ffa1c6"} Dec 01 20:22:02 crc kubenswrapper[4852]: I1201 20:22:02.948771 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"163473eda84858c4e6aaaa951a3c074d3f8d75807c3b63c972a8c665a6de5c53"} Dec 01 20:22:03 crc kubenswrapper[4852]: I1201 20:22:03.973398 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"6e0f029e2ba060b8f61b345744d672bf60a96d981add3f4ef51ffaa84c97a885"} Dec 01 20:22:03 crc kubenswrapper[4852]: I1201 20:22:03.973846 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0eec4983-f32d-4858-a382-eacc49d726fd","Type":"ContainerStarted","Data":"0bbd75ed497595aa76fcf408680ee70ffc7d4460744bf634e1ecc7e716a2a11b"} Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.018406 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=26.256067909 podStartE2EDuration="32.018379756s" podCreationTimestamp="2025-12-01 20:21:32 +0000 UTC" firstStartedPulling="2025-12-01 20:21:56.019614115 +0000 UTC m=+1035.946695532" lastFinishedPulling="2025-12-01 20:22:01.781925962 +0000 UTC m=+1041.709007379" observedRunningTime="2025-12-01 20:22:04.010251158 +0000 UTC m=+1043.937332575" watchObservedRunningTime="2025-12-01 20:22:04.018379756 +0000 UTC m=+1043.945461173" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.371177 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6dccfc5657-w2l72"] Dec 01 20:22:04 crc kubenswrapper[4852]: E1201 20:22:04.372017 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c4de746-0eba-4576-b0d4-086654552c8b" containerName="ovn-config" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.372037 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c4de746-0eba-4576-b0d4-086654552c8b" containerName="ovn-config" Dec 01 20:22:04 crc kubenswrapper[4852]: E1201 20:22:04.372059 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d31bdc12-ed48-45e2-b990-2b098be82119" 
containerName="swift-ring-rebalance" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.372067 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="d31bdc12-ed48-45e2-b990-2b098be82119" containerName="swift-ring-rebalance" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.372575 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c4de746-0eba-4576-b0d4-086654552c8b" containerName="ovn-config" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.372603 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="d31bdc12-ed48-45e2-b990-2b098be82119" containerName="swift-ring-rebalance" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.375571 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.380879 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dccfc5657-w2l72"] Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.390284 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.508206 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-sb\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.508637 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v672p\" (UniqueName: \"kubernetes.io/projected/8561c866-ee4d-4939-86b9-fde9b1200c41-kube-api-access-v672p\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.508803 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-nb\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.508932 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-swift-storage-0\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.509034 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-svc\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.509142 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-config\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " 
pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.610955 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-config\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.611048 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-sb\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.611120 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v672p\" (UniqueName: \"kubernetes.io/projected/8561c866-ee4d-4939-86b9-fde9b1200c41-kube-api-access-v672p\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.611174 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-nb\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.611235 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-swift-storage-0\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.611258 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-svc\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.612444 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-svc\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.612793 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-swift-storage-0\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.613011 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-nb\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: 
I1201 20:22:04.613057 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-sb\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.613440 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-config\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.639010 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v672p\" (UniqueName: \"kubernetes.io/projected/8561c866-ee4d-4939-86b9-fde9b1200c41-kube-api-access-v672p\") pod \"dnsmasq-dns-6dccfc5657-w2l72\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.708318 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:04 crc kubenswrapper[4852]: I1201 20:22:04.997279 4852 generic.go:334] "Generic (PLEG): container finished" podID="5c546551-9ac1-4462-a439-3a63b69d678d" containerID="02eafd83064cd0c2af3487eadab0e5997ec94c218567f5bd0a9aacf742161f69" exitCode=0 Dec 01 20:22:05 crc kubenswrapper[4852]: I1201 20:22:04.999547 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rl4kg" event={"ID":"5c546551-9ac1-4462-a439-3a63b69d678d","Type":"ContainerDied","Data":"02eafd83064cd0c2af3487eadab0e5997ec94c218567f5bd0a9aacf742161f69"} Dec 01 20:22:05 crc kubenswrapper[4852]: I1201 20:22:05.378578 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dccfc5657-w2l72"] Dec 01 20:22:05 crc kubenswrapper[4852]: W1201 20:22:05.386577 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8561c866_ee4d_4939_86b9_fde9b1200c41.slice/crio-0959b217b449adcddc22542c28fb905bb8ff513275c3f8e21ba3d5abdc60a9e4 WatchSource:0}: Error finding container 0959b217b449adcddc22542c28fb905bb8ff513275c3f8e21ba3d5abdc60a9e4: Status 404 returned error can't find the container with id 0959b217b449adcddc22542c28fb905bb8ff513275c3f8e21ba3d5abdc60a9e4 Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.008816 4852 generic.go:334] "Generic (PLEG): container finished" podID="8561c866-ee4d-4939-86b9-fde9b1200c41" containerID="17408a12fbab1ea993f1ce5d1276d072ca2a7a37d6fa562e594d49ac76e491a7" exitCode=0 Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.008887 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" event={"ID":"8561c866-ee4d-4939-86b9-fde9b1200c41","Type":"ContainerDied","Data":"17408a12fbab1ea993f1ce5d1276d072ca2a7a37d6fa562e594d49ac76e491a7"} Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.009427 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" event={"ID":"8561c866-ee4d-4939-86b9-fde9b1200c41","Type":"ContainerStarted","Data":"0959b217b449adcddc22542c28fb905bb8ff513275c3f8e21ba3d5abdc60a9e4"} Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.258737 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.450685 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rl4kg" Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.546853 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzfqj\" (UniqueName: \"kubernetes.io/projected/5c546551-9ac1-4462-a439-3a63b69d678d-kube-api-access-tzfqj\") pod \"5c546551-9ac1-4462-a439-3a63b69d678d\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.546921 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-config-data\") pod \"5c546551-9ac1-4462-a439-3a63b69d678d\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.546971 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-combined-ca-bundle\") pod \"5c546551-9ac1-4462-a439-3a63b69d678d\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.547202 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-db-sync-config-data\") pod \"5c546551-9ac1-4462-a439-3a63b69d678d\" (UID: \"5c546551-9ac1-4462-a439-3a63b69d678d\") " Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.555899 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c546551-9ac1-4462-a439-3a63b69d678d-kube-api-access-tzfqj" (OuterVolumeSpecName: "kube-api-access-tzfqj") pod "5c546551-9ac1-4462-a439-3a63b69d678d" (UID: "5c546551-9ac1-4462-a439-3a63b69d678d"). InnerVolumeSpecName "kube-api-access-tzfqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.556996 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5c546551-9ac1-4462-a439-3a63b69d678d" (UID: "5c546551-9ac1-4462-a439-3a63b69d678d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.580108 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c546551-9ac1-4462-a439-3a63b69d678d" (UID: "5c546551-9ac1-4462-a439-3a63b69d678d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.594855 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.596095 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-config-data" (OuterVolumeSpecName: "config-data") pod "5c546551-9ac1-4462-a439-3a63b69d678d" (UID: "5c546551-9ac1-4462-a439-3a63b69d678d"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.649663 4852 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.649721 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzfqj\" (UniqueName: \"kubernetes.io/projected/5c546551-9ac1-4462-a439-3a63b69d678d-kube-api-access-tzfqj\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.649736 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:06 crc kubenswrapper[4852]: I1201 20:22:06.649747 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c546551-9ac1-4462-a439-3a63b69d678d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.034693 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" event={"ID":"8561c866-ee4d-4939-86b9-fde9b1200c41","Type":"ContainerStarted","Data":"89d7c965849432dc17d2b36dff0aacf992139f75161df7c195955fd390d6781c"} Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.035619 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.039069 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rl4kg" event={"ID":"5c546551-9ac1-4462-a439-3a63b69d678d","Type":"ContainerDied","Data":"7fb54bcdf649e5a61d196f16d05671d33758f3b3f7f0c96c777933277471b6eb"} Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.039113 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fb54bcdf649e5a61d196f16d05671d33758f3b3f7f0c96c777933277471b6eb" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.039216 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rl4kg" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.069115 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" podStartSLOduration=3.069081623 podStartE2EDuration="3.069081623s" podCreationTimestamp="2025-12-01 20:22:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:07.060438499 +0000 UTC m=+1046.987519916" watchObservedRunningTime="2025-12-01 20:22:07.069081623 +0000 UTC m=+1046.996163040" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.594848 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dccfc5657-w2l72"] Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.674199 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b6bbf7467-z5crl"] Dec 01 20:22:07 crc kubenswrapper[4852]: E1201 20:22:07.674724 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c546551-9ac1-4462-a439-3a63b69d678d" containerName="glance-db-sync" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.674753 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c546551-9ac1-4462-a439-3a63b69d678d" containerName="glance-db-sync" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.674974 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c546551-9ac1-4462-a439-3a63b69d678d" containerName="glance-db-sync" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.685056 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.705492 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b6bbf7467-z5crl"] Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.870321 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-swift-storage-0\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.870394 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-config\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.870438 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wztm\" (UniqueName: \"kubernetes.io/projected/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-kube-api-access-7wztm\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.870498 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-svc\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc 
kubenswrapper[4852]: I1201 20:22:07.870700 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-sb\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.870847 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-nb\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.972769 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wztm\" (UniqueName: \"kubernetes.io/projected/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-kube-api-access-7wztm\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.972847 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-svc\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.972880 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-sb\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.972916 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-nb\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.972978 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-swift-storage-0\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.973019 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-config\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.974213 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-sb\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.974259 4852 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-config\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.974263 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-svc\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.974550 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-swift-storage-0\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:07 crc kubenswrapper[4852]: I1201 20:22:07.974716 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-nb\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.003804 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wztm\" (UniqueName: \"kubernetes.io/projected/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-kube-api-access-7wztm\") pod \"dnsmasq-dns-b6bbf7467-z5crl\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.004689 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.467193 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-qs9h7"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.469145 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qs9h7" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.479624 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-qs9h7"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.571197 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-8x2jx"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.572632 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-8x2jx" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.590608 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41f82f81-1abd-4196-9018-aec00a3d1b99-operator-scripts\") pod \"cinder-db-create-qs9h7\" (UID: \"41f82f81-1abd-4196-9018-aec00a3d1b99\") " pod="openstack/cinder-db-create-qs9h7" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.590656 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbv6r\" (UniqueName: \"kubernetes.io/projected/41f82f81-1abd-4196-9018-aec00a3d1b99-kube-api-access-jbv6r\") pod \"cinder-db-create-qs9h7\" (UID: \"41f82f81-1abd-4196-9018-aec00a3d1b99\") " pod="openstack/cinder-db-create-qs9h7" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.591528 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-1aea-account-create-update-5cmc8"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.602117 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1aea-account-create-update-5cmc8" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.612840 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b6bbf7467-z5crl"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.613565 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.656921 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-8x2jx"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.692257 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v525b\" (UniqueName: \"kubernetes.io/projected/f50c4872-2332-4f7d-9df1-99b9b1b639a6-kube-api-access-v525b\") pod \"barbican-db-create-8x2jx\" (UID: \"f50c4872-2332-4f7d-9df1-99b9b1b639a6\") " pod="openstack/barbican-db-create-8x2jx" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.692866 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f50c4872-2332-4f7d-9df1-99b9b1b639a6-operator-scripts\") pod \"barbican-db-create-8x2jx\" (UID: \"f50c4872-2332-4f7d-9df1-99b9b1b639a6\") " pod="openstack/barbican-db-create-8x2jx" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.693011 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41f82f81-1abd-4196-9018-aec00a3d1b99-operator-scripts\") pod \"cinder-db-create-qs9h7\" (UID: \"41f82f81-1abd-4196-9018-aec00a3d1b99\") " pod="openstack/cinder-db-create-qs9h7" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.693099 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbv6r\" (UniqueName: \"kubernetes.io/projected/41f82f81-1abd-4196-9018-aec00a3d1b99-kube-api-access-jbv6r\") pod \"cinder-db-create-qs9h7\" (UID: \"41f82f81-1abd-4196-9018-aec00a3d1b99\") " pod="openstack/cinder-db-create-qs9h7" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.694186 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/41f82f81-1abd-4196-9018-aec00a3d1b99-operator-scripts\") pod \"cinder-db-create-qs9h7\" (UID: \"41f82f81-1abd-4196-9018-aec00a3d1b99\") " pod="openstack/cinder-db-create-qs9h7" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.704533 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1aea-account-create-update-5cmc8"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.725906 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-89ab-account-create-update-jpkjq"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.729945 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-89ab-account-create-update-jpkjq" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.731570 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbv6r\" (UniqueName: \"kubernetes.io/projected/41f82f81-1abd-4196-9018-aec00a3d1b99-kube-api-access-jbv6r\") pod \"cinder-db-create-qs9h7\" (UID: \"41f82f81-1abd-4196-9018-aec00a3d1b99\") " pod="openstack/cinder-db-create-qs9h7" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.736170 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.736972 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-89ab-account-create-update-jpkjq"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.786296 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-rrvx5"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.787618 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rrvx5" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.795269 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf8bn\" (UniqueName: \"kubernetes.io/projected/dcc105dd-813a-4de9-a0a5-b76287276b3b-kube-api-access-pf8bn\") pod \"cinder-1aea-account-create-update-5cmc8\" (UID: \"dcc105dd-813a-4de9-a0a5-b76287276b3b\") " pod="openstack/cinder-1aea-account-create-update-5cmc8" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.795385 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v525b\" (UniqueName: \"kubernetes.io/projected/f50c4872-2332-4f7d-9df1-99b9b1b639a6-kube-api-access-v525b\") pod \"barbican-db-create-8x2jx\" (UID: \"f50c4872-2332-4f7d-9df1-99b9b1b639a6\") " pod="openstack/barbican-db-create-8x2jx" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.795424 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f50c4872-2332-4f7d-9df1-99b9b1b639a6-operator-scripts\") pod \"barbican-db-create-8x2jx\" (UID: \"f50c4872-2332-4f7d-9df1-99b9b1b639a6\") " pod="openstack/barbican-db-create-8x2jx" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.795518 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcc105dd-813a-4de9-a0a5-b76287276b3b-operator-scripts\") pod \"cinder-1aea-account-create-update-5cmc8\" (UID: \"dcc105dd-813a-4de9-a0a5-b76287276b3b\") " pod="openstack/cinder-1aea-account-create-update-5cmc8" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.796405 4852 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f50c4872-2332-4f7d-9df1-99b9b1b639a6-operator-scripts\") pod \"barbican-db-create-8x2jx\" (UID: \"f50c4872-2332-4f7d-9df1-99b9b1b639a6\") " pod="openstack/barbican-db-create-8x2jx" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.804953 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qs9h7" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.810838 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rrvx5"] Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.817625 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v525b\" (UniqueName: \"kubernetes.io/projected/f50c4872-2332-4f7d-9df1-99b9b1b639a6-kube-api-access-v525b\") pod \"barbican-db-create-8x2jx\" (UID: \"f50c4872-2332-4f7d-9df1-99b9b1b639a6\") " pod="openstack/barbican-db-create-8x2jx" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.895959 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8x2jx" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.897979 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcc105dd-813a-4de9-a0a5-b76287276b3b-operator-scripts\") pod \"cinder-1aea-account-create-update-5cmc8\" (UID: \"dcc105dd-813a-4de9-a0a5-b76287276b3b\") " pod="openstack/cinder-1aea-account-create-update-5cmc8" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.898122 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fkgp\" (UniqueName: \"kubernetes.io/projected/ecb2597e-20f0-472e-9a8e-100301abb115-kube-api-access-6fkgp\") pod \"neutron-db-create-rrvx5\" (UID: \"ecb2597e-20f0-472e-9a8e-100301abb115\") " pod="openstack/neutron-db-create-rrvx5" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.898313 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf8bn\" (UniqueName: \"kubernetes.io/projected/dcc105dd-813a-4de9-a0a5-b76287276b3b-kube-api-access-pf8bn\") pod \"cinder-1aea-account-create-update-5cmc8\" (UID: \"dcc105dd-813a-4de9-a0a5-b76287276b3b\") " pod="openstack/cinder-1aea-account-create-update-5cmc8" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.898495 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa141992-beaa-4f60-993a-1aa6b520aff5-operator-scripts\") pod \"barbican-89ab-account-create-update-jpkjq\" (UID: \"aa141992-beaa-4f60-993a-1aa6b520aff5\") " pod="openstack/barbican-89ab-account-create-update-jpkjq" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.898666 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ecb2597e-20f0-472e-9a8e-100301abb115-operator-scripts\") pod \"neutron-db-create-rrvx5\" (UID: \"ecb2597e-20f0-472e-9a8e-100301abb115\") " pod="openstack/neutron-db-create-rrvx5" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.898703 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8gx8\" (UniqueName: 
\"kubernetes.io/projected/aa141992-beaa-4f60-993a-1aa6b520aff5-kube-api-access-l8gx8\") pod \"barbican-89ab-account-create-update-jpkjq\" (UID: \"aa141992-beaa-4f60-993a-1aa6b520aff5\") " pod="openstack/barbican-89ab-account-create-update-jpkjq" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.898785 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcc105dd-813a-4de9-a0a5-b76287276b3b-operator-scripts\") pod \"cinder-1aea-account-create-update-5cmc8\" (UID: \"dcc105dd-813a-4de9-a0a5-b76287276b3b\") " pod="openstack/cinder-1aea-account-create-update-5cmc8" Dec 01 20:22:08 crc kubenswrapper[4852]: I1201 20:22:08.932269 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf8bn\" (UniqueName: \"kubernetes.io/projected/dcc105dd-813a-4de9-a0a5-b76287276b3b-kube-api-access-pf8bn\") pod \"cinder-1aea-account-create-update-5cmc8\" (UID: \"dcc105dd-813a-4de9-a0a5-b76287276b3b\") " pod="openstack/cinder-1aea-account-create-update-5cmc8" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.001712 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa141992-beaa-4f60-993a-1aa6b520aff5-operator-scripts\") pod \"barbican-89ab-account-create-update-jpkjq\" (UID: \"aa141992-beaa-4f60-993a-1aa6b520aff5\") " pod="openstack/barbican-89ab-account-create-update-jpkjq" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.004193 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ecb2597e-20f0-472e-9a8e-100301abb115-operator-scripts\") pod \"neutron-db-create-rrvx5\" (UID: \"ecb2597e-20f0-472e-9a8e-100301abb115\") " pod="openstack/neutron-db-create-rrvx5" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.004249 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8gx8\" (UniqueName: \"kubernetes.io/projected/aa141992-beaa-4f60-993a-1aa6b520aff5-kube-api-access-l8gx8\") pod \"barbican-89ab-account-create-update-jpkjq\" (UID: \"aa141992-beaa-4f60-993a-1aa6b520aff5\") " pod="openstack/barbican-89ab-account-create-update-jpkjq" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.004719 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fkgp\" (UniqueName: \"kubernetes.io/projected/ecb2597e-20f0-472e-9a8e-100301abb115-kube-api-access-6fkgp\") pod \"neutron-db-create-rrvx5\" (UID: \"ecb2597e-20f0-472e-9a8e-100301abb115\") " pod="openstack/neutron-db-create-rrvx5" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.006115 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ecb2597e-20f0-472e-9a8e-100301abb115-operator-scripts\") pod \"neutron-db-create-rrvx5\" (UID: \"ecb2597e-20f0-472e-9a8e-100301abb115\") " pod="openstack/neutron-db-create-rrvx5" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.002835 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa141992-beaa-4f60-993a-1aa6b520aff5-operator-scripts\") pod \"barbican-89ab-account-create-update-jpkjq\" (UID: \"aa141992-beaa-4f60-993a-1aa6b520aff5\") " pod="openstack/barbican-89ab-account-create-update-jpkjq" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.020476 4852 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1aea-account-create-update-5cmc8" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.060310 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8gx8\" (UniqueName: \"kubernetes.io/projected/aa141992-beaa-4f60-993a-1aa6b520aff5-kube-api-access-l8gx8\") pod \"barbican-89ab-account-create-update-jpkjq\" (UID: \"aa141992-beaa-4f60-993a-1aa6b520aff5\") " pod="openstack/barbican-89ab-account-create-update-jpkjq" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.072040 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-0a48-account-create-update-r5sp2"] Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.076737 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-0a48-account-create-update-r5sp2" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.082327 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-89ab-account-create-update-jpkjq" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.083655 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.086663 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fkgp\" (UniqueName: \"kubernetes.io/projected/ecb2597e-20f0-472e-9a8e-100301abb115-kube-api-access-6fkgp\") pod \"neutron-db-create-rrvx5\" (UID: \"ecb2597e-20f0-472e-9a8e-100301abb115\") " pod="openstack/neutron-db-create-rrvx5" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.116216 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" podUID="8561c866-ee4d-4939-86b9-fde9b1200c41" containerName="dnsmasq-dns" containerID="cri-o://89d7c965849432dc17d2b36dff0aacf992139f75161df7c195955fd390d6781c" gracePeriod=10 Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.117282 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" event={"ID":"41e5d9d4-837b-4407-8eee-4d6c44e1e20c","Type":"ContainerStarted","Data":"21abc31ca73b51516c46395d903d5a55533b0a56c6e984e0f0af0913d0d4e29d"} Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.116442 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77h6w\" (UniqueName: \"kubernetes.io/projected/a4c08895-1b79-4672-ab94-f7d53d17616a-kube-api-access-77h6w\") pod \"neutron-0a48-account-create-update-r5sp2\" (UID: \"a4c08895-1b79-4672-ab94-f7d53d17616a\") " pod="openstack/neutron-0a48-account-create-update-r5sp2" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.118274 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4c08895-1b79-4672-ab94-f7d53d17616a-operator-scripts\") pod \"neutron-0a48-account-create-update-r5sp2\" (UID: \"a4c08895-1b79-4672-ab94-f7d53d17616a\") " pod="openstack/neutron-0a48-account-create-update-r5sp2" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.119087 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-rrvx5" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.162672 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-0a48-account-create-update-r5sp2"] Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.191212 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-mx887"] Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.198950 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.201427 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.208164 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.208270 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-tqglv" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.208477 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.213565 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-mx887"] Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.221023 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4c08895-1b79-4672-ab94-f7d53d17616a-operator-scripts\") pod \"neutron-0a48-account-create-update-r5sp2\" (UID: \"a4c08895-1b79-4672-ab94-f7d53d17616a\") " pod="openstack/neutron-0a48-account-create-update-r5sp2" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.221529 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-combined-ca-bundle\") pod \"keystone-db-sync-mx887\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.221651 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-config-data\") pod \"keystone-db-sync-mx887\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.221692 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsnmj\" (UniqueName: \"kubernetes.io/projected/31c7a083-b879-47d4-ad43-67019e4a9ecf-kube-api-access-wsnmj\") pod \"keystone-db-sync-mx887\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.221721 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77h6w\" (UniqueName: \"kubernetes.io/projected/a4c08895-1b79-4672-ab94-f7d53d17616a-kube-api-access-77h6w\") pod \"neutron-0a48-account-create-update-r5sp2\" (UID: \"a4c08895-1b79-4672-ab94-f7d53d17616a\") " pod="openstack/neutron-0a48-account-create-update-r5sp2" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.235389 4852 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4c08895-1b79-4672-ab94-f7d53d17616a-operator-scripts\") pod \"neutron-0a48-account-create-update-r5sp2\" (UID: \"a4c08895-1b79-4672-ab94-f7d53d17616a\") " pod="openstack/neutron-0a48-account-create-update-r5sp2" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.269109 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77h6w\" (UniqueName: \"kubernetes.io/projected/a4c08895-1b79-4672-ab94-f7d53d17616a-kube-api-access-77h6w\") pod \"neutron-0a48-account-create-update-r5sp2\" (UID: \"a4c08895-1b79-4672-ab94-f7d53d17616a\") " pod="openstack/neutron-0a48-account-create-update-r5sp2" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.282205 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-0a48-account-create-update-r5sp2" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.324280 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-config-data\") pod \"keystone-db-sync-mx887\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.324352 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsnmj\" (UniqueName: \"kubernetes.io/projected/31c7a083-b879-47d4-ad43-67019e4a9ecf-kube-api-access-wsnmj\") pod \"keystone-db-sync-mx887\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.324475 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-combined-ca-bundle\") pod \"keystone-db-sync-mx887\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.332371 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-combined-ca-bundle\") pod \"keystone-db-sync-mx887\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.340776 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-config-data\") pod \"keystone-db-sync-mx887\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.350864 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsnmj\" (UniqueName: \"kubernetes.io/projected/31c7a083-b879-47d4-ad43-67019e4a9ecf-kube-api-access-wsnmj\") pod \"keystone-db-sync-mx887\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.422906 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-qs9h7"] Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.611904 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:09 crc kubenswrapper[4852]: W1201 20:22:09.653344 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf50c4872_2332_4f7d_9df1_99b9b1b639a6.slice/crio-cb9e698422b7b5efc96c1fca7f986451bc157c04dfbbcbd169971978f6b2a3de WatchSource:0}: Error finding container cb9e698422b7b5efc96c1fca7f986451bc157c04dfbbcbd169971978f6b2a3de: Status 404 returned error can't find the container with id cb9e698422b7b5efc96c1fca7f986451bc157c04dfbbcbd169971978f6b2a3de Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.659564 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-8x2jx"] Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.832909 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-89ab-account-create-update-jpkjq"] Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.931905 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1aea-account-create-update-5cmc8"] Dec 01 20:22:09 crc kubenswrapper[4852]: I1201 20:22:09.951620 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rrvx5"] Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.003514 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-mx887"] Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.013439 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-0a48-account-create-update-r5sp2"] Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.138205 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mx887" event={"ID":"31c7a083-b879-47d4-ad43-67019e4a9ecf","Type":"ContainerStarted","Data":"d65016bd17fb7703e1f2932b1877b86feffdc82c864a4f1407adde8422a40375"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.140763 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rrvx5" event={"ID":"ecb2597e-20f0-472e-9a8e-100301abb115","Type":"ContainerStarted","Data":"fac03ff1608bfbfbb447edf0c2922bd86fcfcb6eb2904ecef52332ecd11d08d2"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.145167 4852 generic.go:334] "Generic (PLEG): container finished" podID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerID="1f900b176e494dab4b9de7ba7df62acc2d5a222173b1798070aa89420bfd73d5" exitCode=0 Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.145321 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" event={"ID":"41e5d9d4-837b-4407-8eee-4d6c44e1e20c","Type":"ContainerDied","Data":"1f900b176e494dab4b9de7ba7df62acc2d5a222173b1798070aa89420bfd73d5"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.165315 4852 generic.go:334] "Generic (PLEG): container finished" podID="8561c866-ee4d-4939-86b9-fde9b1200c41" containerID="89d7c965849432dc17d2b36dff0aacf992139f75161df7c195955fd390d6781c" exitCode=0 Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.165510 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" event={"ID":"8561c866-ee4d-4939-86b9-fde9b1200c41","Type":"ContainerDied","Data":"89d7c965849432dc17d2b36dff0aacf992139f75161df7c195955fd390d6781c"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.167941 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1aea-account-create-update-5cmc8" 
event={"ID":"dcc105dd-813a-4de9-a0a5-b76287276b3b","Type":"ContainerStarted","Data":"5866ecf281e3db1dba0628999aafae4fa842f41ddc5882c5511f3bc6e54dd7c7"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.173665 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8x2jx" event={"ID":"f50c4872-2332-4f7d-9df1-99b9b1b639a6","Type":"ContainerStarted","Data":"27bfeb0b0613393041ce9d634938987e72a639bc472fe920b0746bea0d21bc9a"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.173715 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8x2jx" event={"ID":"f50c4872-2332-4f7d-9df1-99b9b1b639a6","Type":"ContainerStarted","Data":"cb9e698422b7b5efc96c1fca7f986451bc157c04dfbbcbd169971978f6b2a3de"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.202494 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-0a48-account-create-update-r5sp2" event={"ID":"a4c08895-1b79-4672-ab94-f7d53d17616a","Type":"ContainerStarted","Data":"f81d1e51534624ea8c297de747d83bacc9e953a80d605bcc111ee74ea02b1a11"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.208647 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qs9h7" event={"ID":"41f82f81-1abd-4196-9018-aec00a3d1b99","Type":"ContainerStarted","Data":"a5171b95ee3ba14699831acd900e1e8b4fe71e02aefcb21cc2391df28eb08710"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.208699 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qs9h7" event={"ID":"41f82f81-1abd-4196-9018-aec00a3d1b99","Type":"ContainerStarted","Data":"3f1d844472d46dfe77e19f28fce129be0ba1e230e6af518c1bd007d007461541"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.229528 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-89ab-account-create-update-jpkjq" event={"ID":"aa141992-beaa-4f60-993a-1aa6b520aff5","Type":"ContainerStarted","Data":"8f037756fd6516bc00249b1a074e0df6ae55f2781d5ecfaeb202732cf8c27727"} Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.235744 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-8x2jx" podStartSLOduration=2.235712975 podStartE2EDuration="2.235712975s" podCreationTimestamp="2025-12-01 20:22:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:10.224858422 +0000 UTC m=+1050.151939849" watchObservedRunningTime="2025-12-01 20:22:10.235712975 +0000 UTC m=+1050.162794392" Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.262507 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-qs9h7" podStartSLOduration=2.26247772 podStartE2EDuration="2.26247772s" podCreationTimestamp="2025-12-01 20:22:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:10.258602138 +0000 UTC m=+1050.185683555" watchObservedRunningTime="2025-12-01 20:22:10.26247772 +0000 UTC m=+1050.189559137" Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.463541 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.572361 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v672p\" (UniqueName: \"kubernetes.io/projected/8561c866-ee4d-4939-86b9-fde9b1200c41-kube-api-access-v672p\") pod \"8561c866-ee4d-4939-86b9-fde9b1200c41\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.572420 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-config\") pod \"8561c866-ee4d-4939-86b9-fde9b1200c41\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.572533 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-nb\") pod \"8561c866-ee4d-4939-86b9-fde9b1200c41\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.572602 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-sb\") pod \"8561c866-ee4d-4939-86b9-fde9b1200c41\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.572631 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-swift-storage-0\") pod \"8561c866-ee4d-4939-86b9-fde9b1200c41\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.572697 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-svc\") pod \"8561c866-ee4d-4939-86b9-fde9b1200c41\" (UID: \"8561c866-ee4d-4939-86b9-fde9b1200c41\") " Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.693051 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8561c866-ee4d-4939-86b9-fde9b1200c41-kube-api-access-v672p" (OuterVolumeSpecName: "kube-api-access-v672p") pod "8561c866-ee4d-4939-86b9-fde9b1200c41" (UID: "8561c866-ee4d-4939-86b9-fde9b1200c41"). InnerVolumeSpecName "kube-api-access-v672p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:10 crc kubenswrapper[4852]: I1201 20:22:10.777937 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v672p\" (UniqueName: \"kubernetes.io/projected/8561c866-ee4d-4939-86b9-fde9b1200c41-kube-api-access-v672p\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:10.999596 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8561c866-ee4d-4939-86b9-fde9b1200c41" (UID: "8561c866-ee4d-4939-86b9-fde9b1200c41"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.016637 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8561c866-ee4d-4939-86b9-fde9b1200c41" (UID: "8561c866-ee4d-4939-86b9-fde9b1200c41"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.017813 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8561c866-ee4d-4939-86b9-fde9b1200c41" (UID: "8561c866-ee4d-4939-86b9-fde9b1200c41"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.028284 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8561c866-ee4d-4939-86b9-fde9b1200c41" (UID: "8561c866-ee4d-4939-86b9-fde9b1200c41"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.034913 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-config" (OuterVolumeSpecName: "config") pod "8561c866-ee4d-4939-86b9-fde9b1200c41" (UID: "8561c866-ee4d-4939-86b9-fde9b1200c41"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.085779 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.087396 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.087422 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.087508 4852 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.087526 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8561c866-ee4d-4939-86b9-fde9b1200c41-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.243317 4852 generic.go:334] "Generic (PLEG): container finished" podID="41f82f81-1abd-4196-9018-aec00a3d1b99" containerID="a5171b95ee3ba14699831acd900e1e8b4fe71e02aefcb21cc2391df28eb08710" exitCode=0 Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.243486 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qs9h7" 
event={"ID":"41f82f81-1abd-4196-9018-aec00a3d1b99","Type":"ContainerDied","Data":"a5171b95ee3ba14699831acd900e1e8b4fe71e02aefcb21cc2391df28eb08710"} Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.247813 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-89ab-account-create-update-jpkjq" event={"ID":"aa141992-beaa-4f60-993a-1aa6b520aff5","Type":"ContainerStarted","Data":"6d620a02369483af0fc509c6a7b21e753cd840b93a7997a569ee818908b3c5c2"} Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.254263 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" event={"ID":"8561c866-ee4d-4939-86b9-fde9b1200c41","Type":"ContainerDied","Data":"0959b217b449adcddc22542c28fb905bb8ff513275c3f8e21ba3d5abdc60a9e4"} Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.254380 4852 scope.go:117] "RemoveContainer" containerID="89d7c965849432dc17d2b36dff0aacf992139f75161df7c195955fd390d6781c" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.254665 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dccfc5657-w2l72" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.258717 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" event={"ID":"41e5d9d4-837b-4407-8eee-4d6c44e1e20c","Type":"ContainerStarted","Data":"f452cda2ec576d4e0c3a7fc42330501f5d02a2af0de2d78ea32148fcb30aca98"} Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.259818 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.262267 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1aea-account-create-update-5cmc8" event={"ID":"dcc105dd-813a-4de9-a0a5-b76287276b3b","Type":"ContainerStarted","Data":"ab3f717af585ef948d30990430349a3a5e35ef50b8591d8ef133c6ef4b2ac57c"} Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.267258 4852 generic.go:334] "Generic (PLEG): container finished" podID="f50c4872-2332-4f7d-9df1-99b9b1b639a6" containerID="27bfeb0b0613393041ce9d634938987e72a639bc472fe920b0746bea0d21bc9a" exitCode=0 Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.267305 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8x2jx" event={"ID":"f50c4872-2332-4f7d-9df1-99b9b1b639a6","Type":"ContainerDied","Data":"27bfeb0b0613393041ce9d634938987e72a639bc472fe920b0746bea0d21bc9a"} Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.269680 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rrvx5" event={"ID":"ecb2597e-20f0-472e-9a8e-100301abb115","Type":"ContainerStarted","Data":"5627e0a19f80214530e2e4a292a9a7a1350552543765423ed882cd2dcc60aa57"} Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.273208 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-0a48-account-create-update-r5sp2" event={"ID":"a4c08895-1b79-4672-ab94-f7d53d17616a","Type":"ContainerStarted","Data":"f9cc7e6eda9b98f182d9b297ad94b79a69bfa3c22c867e9ec1be9592e4067097"} Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.292127 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-89ab-account-create-update-jpkjq" podStartSLOduration=3.292101761 podStartE2EDuration="3.292101761s" podCreationTimestamp="2025-12-01 20:22:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:11.287048402 +0000 UTC m=+1051.214129879" watchObservedRunningTime="2025-12-01 20:22:11.292101761 +0000 UTC m=+1051.219183178" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.301501 4852 scope.go:117] "RemoveContainer" containerID="17408a12fbab1ea993f1ce5d1276d072ca2a7a37d6fa562e594d49ac76e491a7" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.326022 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-1aea-account-create-update-5cmc8" podStartSLOduration=3.325984742 podStartE2EDuration="3.325984742s" podCreationTimestamp="2025-12-01 20:22:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:11.314048106 +0000 UTC m=+1051.241129523" watchObservedRunningTime="2025-12-01 20:22:11.325984742 +0000 UTC m=+1051.253066159" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.339834 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" podStartSLOduration=4.339812599 podStartE2EDuration="4.339812599s" podCreationTimestamp="2025-12-01 20:22:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:11.337169346 +0000 UTC m=+1051.264250763" watchObservedRunningTime="2025-12-01 20:22:11.339812599 +0000 UTC m=+1051.266894016" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.361271 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-rrvx5" podStartSLOduration=3.361245297 podStartE2EDuration="3.361245297s" podCreationTimestamp="2025-12-01 20:22:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:11.36069572 +0000 UTC m=+1051.287777137" watchObservedRunningTime="2025-12-01 20:22:11.361245297 +0000 UTC m=+1051.288326704" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.377787 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-0a48-account-create-update-r5sp2" podStartSLOduration=3.377765439 podStartE2EDuration="3.377765439s" podCreationTimestamp="2025-12-01 20:22:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:11.375807817 +0000 UTC m=+1051.302889234" watchObservedRunningTime="2025-12-01 20:22:11.377765439 +0000 UTC m=+1051.304846856" Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.394893 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dccfc5657-w2l72"] Dec 01 20:22:11 crc kubenswrapper[4852]: I1201 20:22:11.408088 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6dccfc5657-w2l72"] Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.292076 4852 generic.go:334] "Generic (PLEG): container finished" podID="dcc105dd-813a-4de9-a0a5-b76287276b3b" containerID="ab3f717af585ef948d30990430349a3a5e35ef50b8591d8ef133c6ef4b2ac57c" exitCode=0 Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.292336 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1aea-account-create-update-5cmc8" 
event={"ID":"dcc105dd-813a-4de9-a0a5-b76287276b3b","Type":"ContainerDied","Data":"ab3f717af585ef948d30990430349a3a5e35ef50b8591d8ef133c6ef4b2ac57c"} Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.299429 4852 generic.go:334] "Generic (PLEG): container finished" podID="ecb2597e-20f0-472e-9a8e-100301abb115" containerID="5627e0a19f80214530e2e4a292a9a7a1350552543765423ed882cd2dcc60aa57" exitCode=0 Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.299529 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rrvx5" event={"ID":"ecb2597e-20f0-472e-9a8e-100301abb115","Type":"ContainerDied","Data":"5627e0a19f80214530e2e4a292a9a7a1350552543765423ed882cd2dcc60aa57"} Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.302355 4852 generic.go:334] "Generic (PLEG): container finished" podID="a4c08895-1b79-4672-ab94-f7d53d17616a" containerID="f9cc7e6eda9b98f182d9b297ad94b79a69bfa3c22c867e9ec1be9592e4067097" exitCode=0 Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.302441 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-0a48-account-create-update-r5sp2" event={"ID":"a4c08895-1b79-4672-ab94-f7d53d17616a","Type":"ContainerDied","Data":"f9cc7e6eda9b98f182d9b297ad94b79a69bfa3c22c867e9ec1be9592e4067097"} Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.304135 4852 generic.go:334] "Generic (PLEG): container finished" podID="aa141992-beaa-4f60-993a-1aa6b520aff5" containerID="6d620a02369483af0fc509c6a7b21e753cd840b93a7997a569ee818908b3c5c2" exitCode=0 Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.304223 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-89ab-account-create-update-jpkjq" event={"ID":"aa141992-beaa-4f60-993a-1aa6b520aff5","Type":"ContainerDied","Data":"6d620a02369483af0fc509c6a7b21e753cd840b93a7997a569ee818908b3c5c2"} Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.335046 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8561c866-ee4d-4939-86b9-fde9b1200c41" path="/var/lib/kubelet/pods/8561c866-ee4d-4939-86b9-fde9b1200c41/volumes" Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.796505 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qs9h7" Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.817471 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-8x2jx" Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.926122 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v525b\" (UniqueName: \"kubernetes.io/projected/f50c4872-2332-4f7d-9df1-99b9b1b639a6-kube-api-access-v525b\") pod \"f50c4872-2332-4f7d-9df1-99b9b1b639a6\" (UID: \"f50c4872-2332-4f7d-9df1-99b9b1b639a6\") " Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.926328 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbv6r\" (UniqueName: \"kubernetes.io/projected/41f82f81-1abd-4196-9018-aec00a3d1b99-kube-api-access-jbv6r\") pod \"41f82f81-1abd-4196-9018-aec00a3d1b99\" (UID: \"41f82f81-1abd-4196-9018-aec00a3d1b99\") " Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.926362 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41f82f81-1abd-4196-9018-aec00a3d1b99-operator-scripts\") pod \"41f82f81-1abd-4196-9018-aec00a3d1b99\" (UID: \"41f82f81-1abd-4196-9018-aec00a3d1b99\") " Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.926464 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f50c4872-2332-4f7d-9df1-99b9b1b639a6-operator-scripts\") pod \"f50c4872-2332-4f7d-9df1-99b9b1b639a6\" (UID: \"f50c4872-2332-4f7d-9df1-99b9b1b639a6\") " Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.927583 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f50c4872-2332-4f7d-9df1-99b9b1b639a6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f50c4872-2332-4f7d-9df1-99b9b1b639a6" (UID: "f50c4872-2332-4f7d-9df1-99b9b1b639a6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.928024 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41f82f81-1abd-4196-9018-aec00a3d1b99-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "41f82f81-1abd-4196-9018-aec00a3d1b99" (UID: "41f82f81-1abd-4196-9018-aec00a3d1b99"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.939007 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41f82f81-1abd-4196-9018-aec00a3d1b99-kube-api-access-jbv6r" (OuterVolumeSpecName: "kube-api-access-jbv6r") pod "41f82f81-1abd-4196-9018-aec00a3d1b99" (UID: "41f82f81-1abd-4196-9018-aec00a3d1b99"). InnerVolumeSpecName "kube-api-access-jbv6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:12 crc kubenswrapper[4852]: I1201 20:22:12.943856 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f50c4872-2332-4f7d-9df1-99b9b1b639a6-kube-api-access-v525b" (OuterVolumeSpecName: "kube-api-access-v525b") pod "f50c4872-2332-4f7d-9df1-99b9b1b639a6" (UID: "f50c4872-2332-4f7d-9df1-99b9b1b639a6"). InnerVolumeSpecName "kube-api-access-v525b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.030024 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbv6r\" (UniqueName: \"kubernetes.io/projected/41f82f81-1abd-4196-9018-aec00a3d1b99-kube-api-access-jbv6r\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.030087 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41f82f81-1abd-4196-9018-aec00a3d1b99-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.030105 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f50c4872-2332-4f7d-9df1-99b9b1b639a6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.030120 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v525b\" (UniqueName: \"kubernetes.io/projected/f50c4872-2332-4f7d-9df1-99b9b1b639a6-kube-api-access-v525b\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.323236 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qs9h7" event={"ID":"41f82f81-1abd-4196-9018-aec00a3d1b99","Type":"ContainerDied","Data":"3f1d844472d46dfe77e19f28fce129be0ba1e230e6af518c1bd007d007461541"} Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.323320 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f1d844472d46dfe77e19f28fce129be0ba1e230e6af518c1bd007d007461541" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.323501 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qs9h7" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.326392 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8x2jx" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.326509 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8x2jx" event={"ID":"f50c4872-2332-4f7d-9df1-99b9b1b639a6","Type":"ContainerDied","Data":"cb9e698422b7b5efc96c1fca7f986451bc157c04dfbbcbd169971978f6b2a3de"} Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.326555 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb9e698422b7b5efc96c1fca7f986451bc157c04dfbbcbd169971978f6b2a3de" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.635990 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1aea-account-create-update-5cmc8" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.745994 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pf8bn\" (UniqueName: \"kubernetes.io/projected/dcc105dd-813a-4de9-a0a5-b76287276b3b-kube-api-access-pf8bn\") pod \"dcc105dd-813a-4de9-a0a5-b76287276b3b\" (UID: \"dcc105dd-813a-4de9-a0a5-b76287276b3b\") " Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.746548 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcc105dd-813a-4de9-a0a5-b76287276b3b-operator-scripts\") pod \"dcc105dd-813a-4de9-a0a5-b76287276b3b\" (UID: \"dcc105dd-813a-4de9-a0a5-b76287276b3b\") " Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.747249 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcc105dd-813a-4de9-a0a5-b76287276b3b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dcc105dd-813a-4de9-a0a5-b76287276b3b" (UID: "dcc105dd-813a-4de9-a0a5-b76287276b3b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.750517 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcc105dd-813a-4de9-a0a5-b76287276b3b-kube-api-access-pf8bn" (OuterVolumeSpecName: "kube-api-access-pf8bn") pod "dcc105dd-813a-4de9-a0a5-b76287276b3b" (UID: "dcc105dd-813a-4de9-a0a5-b76287276b3b"). InnerVolumeSpecName "kube-api-access-pf8bn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.832470 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-0a48-account-create-update-r5sp2" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.840000 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rrvx5" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.850404 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-89ab-account-create-update-jpkjq" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.850535 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcc105dd-813a-4de9-a0a5-b76287276b3b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.850565 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pf8bn\" (UniqueName: \"kubernetes.io/projected/dcc105dd-813a-4de9-a0a5-b76287276b3b-kube-api-access-pf8bn\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.951945 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77h6w\" (UniqueName: \"kubernetes.io/projected/a4c08895-1b79-4672-ab94-f7d53d17616a-kube-api-access-77h6w\") pod \"a4c08895-1b79-4672-ab94-f7d53d17616a\" (UID: \"a4c08895-1b79-4672-ab94-f7d53d17616a\") " Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.952000 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4c08895-1b79-4672-ab94-f7d53d17616a-operator-scripts\") pod \"a4c08895-1b79-4672-ab94-f7d53d17616a\" (UID: \"a4c08895-1b79-4672-ab94-f7d53d17616a\") " Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.952045 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa141992-beaa-4f60-993a-1aa6b520aff5-operator-scripts\") pod \"aa141992-beaa-4f60-993a-1aa6b520aff5\" (UID: \"aa141992-beaa-4f60-993a-1aa6b520aff5\") " Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.952117 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8gx8\" (UniqueName: \"kubernetes.io/projected/aa141992-beaa-4f60-993a-1aa6b520aff5-kube-api-access-l8gx8\") pod \"aa141992-beaa-4f60-993a-1aa6b520aff5\" (UID: \"aa141992-beaa-4f60-993a-1aa6b520aff5\") " Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.952185 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ecb2597e-20f0-472e-9a8e-100301abb115-operator-scripts\") pod \"ecb2597e-20f0-472e-9a8e-100301abb115\" (UID: \"ecb2597e-20f0-472e-9a8e-100301abb115\") " Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.952279 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fkgp\" (UniqueName: \"kubernetes.io/projected/ecb2597e-20f0-472e-9a8e-100301abb115-kube-api-access-6fkgp\") pod \"ecb2597e-20f0-472e-9a8e-100301abb115\" (UID: \"ecb2597e-20f0-472e-9a8e-100301abb115\") " Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.952876 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4c08895-1b79-4672-ab94-f7d53d17616a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a4c08895-1b79-4672-ab94-f7d53d17616a" (UID: "a4c08895-1b79-4672-ab94-f7d53d17616a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.953298 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa141992-beaa-4f60-993a-1aa6b520aff5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "aa141992-beaa-4f60-993a-1aa6b520aff5" (UID: "aa141992-beaa-4f60-993a-1aa6b520aff5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.953410 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecb2597e-20f0-472e-9a8e-100301abb115-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ecb2597e-20f0-472e-9a8e-100301abb115" (UID: "ecb2597e-20f0-472e-9a8e-100301abb115"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.956226 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecb2597e-20f0-472e-9a8e-100301abb115-kube-api-access-6fkgp" (OuterVolumeSpecName: "kube-api-access-6fkgp") pod "ecb2597e-20f0-472e-9a8e-100301abb115" (UID: "ecb2597e-20f0-472e-9a8e-100301abb115"). InnerVolumeSpecName "kube-api-access-6fkgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.958160 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa141992-beaa-4f60-993a-1aa6b520aff5-kube-api-access-l8gx8" (OuterVolumeSpecName: "kube-api-access-l8gx8") pod "aa141992-beaa-4f60-993a-1aa6b520aff5" (UID: "aa141992-beaa-4f60-993a-1aa6b520aff5"). InnerVolumeSpecName "kube-api-access-l8gx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:13 crc kubenswrapper[4852]: I1201 20:22:13.960222 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4c08895-1b79-4672-ab94-f7d53d17616a-kube-api-access-77h6w" (OuterVolumeSpecName: "kube-api-access-77h6w") pod "a4c08895-1b79-4672-ab94-f7d53d17616a" (UID: "a4c08895-1b79-4672-ab94-f7d53d17616a"). InnerVolumeSpecName "kube-api-access-77h6w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.054704 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fkgp\" (UniqueName: \"kubernetes.io/projected/ecb2597e-20f0-472e-9a8e-100301abb115-kube-api-access-6fkgp\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.054750 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77h6w\" (UniqueName: \"kubernetes.io/projected/a4c08895-1b79-4672-ab94-f7d53d17616a-kube-api-access-77h6w\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.054765 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a4c08895-1b79-4672-ab94-f7d53d17616a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.054781 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa141992-beaa-4f60-993a-1aa6b520aff5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.054795 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8gx8\" (UniqueName: \"kubernetes.io/projected/aa141992-beaa-4f60-993a-1aa6b520aff5-kube-api-access-l8gx8\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.054807 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ecb2597e-20f0-472e-9a8e-100301abb115-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.342056 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1aea-account-create-update-5cmc8" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.342055 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1aea-account-create-update-5cmc8" event={"ID":"dcc105dd-813a-4de9-a0a5-b76287276b3b","Type":"ContainerDied","Data":"5866ecf281e3db1dba0628999aafae4fa842f41ddc5882c5511f3bc6e54dd7c7"} Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.343011 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5866ecf281e3db1dba0628999aafae4fa842f41ddc5882c5511f3bc6e54dd7c7" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.344291 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-rrvx5" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.344301 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rrvx5" event={"ID":"ecb2597e-20f0-472e-9a8e-100301abb115","Type":"ContainerDied","Data":"fac03ff1608bfbfbb447edf0c2922bd86fcfcb6eb2904ecef52332ecd11d08d2"} Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.344362 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fac03ff1608bfbfbb447edf0c2922bd86fcfcb6eb2904ecef52332ecd11d08d2" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.361401 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-0a48-account-create-update-r5sp2" event={"ID":"a4c08895-1b79-4672-ab94-f7d53d17616a","Type":"ContainerDied","Data":"f81d1e51534624ea8c297de747d83bacc9e953a80d605bcc111ee74ea02b1a11"} Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.361476 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f81d1e51534624ea8c297de747d83bacc9e953a80d605bcc111ee74ea02b1a11" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.361559 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-0a48-account-create-update-r5sp2" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.364631 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-89ab-account-create-update-jpkjq" event={"ID":"aa141992-beaa-4f60-993a-1aa6b520aff5","Type":"ContainerDied","Data":"8f037756fd6516bc00249b1a074e0df6ae55f2781d5ecfaeb202732cf8c27727"} Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.364682 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-89ab-account-create-update-jpkjq" Dec 01 20:22:14 crc kubenswrapper[4852]: I1201 20:22:14.364685 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f037756fd6516bc00249b1a074e0df6ae55f2781d5ecfaeb202732cf8c27727" Dec 01 20:22:17 crc kubenswrapper[4852]: I1201 20:22:17.402244 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mx887" event={"ID":"31c7a083-b879-47d4-ad43-67019e4a9ecf","Type":"ContainerStarted","Data":"ee9613a8690b0e24a9b003b71f5d8f85710cd088eee11004c2292ae6cf3e55cf"} Dec 01 20:22:17 crc kubenswrapper[4852]: I1201 20:22:17.431143 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-mx887" podStartSLOduration=1.690656663 podStartE2EDuration="8.431119996s" podCreationTimestamp="2025-12-01 20:22:09 +0000 UTC" firstStartedPulling="2025-12-01 20:22:10.038345997 +0000 UTC m=+1049.965427414" lastFinishedPulling="2025-12-01 20:22:16.77880933 +0000 UTC m=+1056.705890747" observedRunningTime="2025-12-01 20:22:17.428779861 +0000 UTC m=+1057.355861318" watchObservedRunningTime="2025-12-01 20:22:17.431119996 +0000 UTC m=+1057.358201423" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.006666 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.089921 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-kfqqq"] Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.090268 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" podUID="2e2d3b9c-bafa-4691-ac33-7852955ea9ad" containerName="dnsmasq-dns" containerID="cri-o://a69edb9fd8ac1ecb1ab526427647c93f3fe6f820b2ecdfd5c40dbda53325f0d8" gracePeriod=10 Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.415607 4852 generic.go:334] "Generic (PLEG): container finished" podID="2e2d3b9c-bafa-4691-ac33-7852955ea9ad" containerID="a69edb9fd8ac1ecb1ab526427647c93f3fe6f820b2ecdfd5c40dbda53325f0d8" exitCode=0 Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.416187 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" event={"ID":"2e2d3b9c-bafa-4691-ac33-7852955ea9ad","Type":"ContainerDied","Data":"a69edb9fd8ac1ecb1ab526427647c93f3fe6f820b2ecdfd5c40dbda53325f0d8"} Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.650195 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.747225 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-nb\") pod \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.747343 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxmhv\" (UniqueName: \"kubernetes.io/projected/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-kube-api-access-fxmhv\") pod \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.747426 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-config\") pod \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.747477 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-sb\") pod \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.747508 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-dns-svc\") pod \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\" (UID: \"2e2d3b9c-bafa-4691-ac33-7852955ea9ad\") " Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.773808 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-kube-api-access-fxmhv" (OuterVolumeSpecName: "kube-api-access-fxmhv") pod "2e2d3b9c-bafa-4691-ac33-7852955ea9ad" (UID: "2e2d3b9c-bafa-4691-ac33-7852955ea9ad"). InnerVolumeSpecName "kube-api-access-fxmhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.801603 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-config" (OuterVolumeSpecName: "config") pod "2e2d3b9c-bafa-4691-ac33-7852955ea9ad" (UID: "2e2d3b9c-bafa-4691-ac33-7852955ea9ad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.803834 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2e2d3b9c-bafa-4691-ac33-7852955ea9ad" (UID: "2e2d3b9c-bafa-4691-ac33-7852955ea9ad"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.815284 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2e2d3b9c-bafa-4691-ac33-7852955ea9ad" (UID: "2e2d3b9c-bafa-4691-ac33-7852955ea9ad"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.820169 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2e2d3b9c-bafa-4691-ac33-7852955ea9ad" (UID: "2e2d3b9c-bafa-4691-ac33-7852955ea9ad"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.850395 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.850712 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxmhv\" (UniqueName: \"kubernetes.io/projected/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-kube-api-access-fxmhv\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.850841 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.851147 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:18 crc kubenswrapper[4852]: I1201 20:22:18.851249 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e2d3b9c-bafa-4691-ac33-7852955ea9ad-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:19 crc kubenswrapper[4852]: I1201 20:22:19.426401 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" event={"ID":"2e2d3b9c-bafa-4691-ac33-7852955ea9ad","Type":"ContainerDied","Data":"1f66cb33700dc2765476ba6515ab9cf3befdaa195b1363e18f32f25df5d5a567"} Dec 01 20:22:19 crc kubenswrapper[4852]: I1201 20:22:19.426947 4852 scope.go:117] "RemoveContainer" containerID="a69edb9fd8ac1ecb1ab526427647c93f3fe6f820b2ecdfd5c40dbda53325f0d8" Dec 01 20:22:19 crc kubenswrapper[4852]: I1201 20:22:19.426596 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c8cb8df65-kfqqq" Dec 01 20:22:19 crc kubenswrapper[4852]: I1201 20:22:19.448230 4852 scope.go:117] "RemoveContainer" containerID="9bb626e26446a3c4d9b03971591a8117a63d91f5bdbc5c7b857e3ef5f1a204dd" Dec 01 20:22:19 crc kubenswrapper[4852]: I1201 20:22:19.474176 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-kfqqq"] Dec 01 20:22:19 crc kubenswrapper[4852]: I1201 20:22:19.480276 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-kfqqq"] Dec 01 20:22:20 crc kubenswrapper[4852]: I1201 20:22:20.348286 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e2d3b9c-bafa-4691-ac33-7852955ea9ad" path="/var/lib/kubelet/pods/2e2d3b9c-bafa-4691-ac33-7852955ea9ad/volumes" Dec 01 20:22:21 crc kubenswrapper[4852]: I1201 20:22:21.447186 4852 generic.go:334] "Generic (PLEG): container finished" podID="31c7a083-b879-47d4-ad43-67019e4a9ecf" containerID="ee9613a8690b0e24a9b003b71f5d8f85710cd088eee11004c2292ae6cf3e55cf" exitCode=0 Dec 01 20:22:21 crc kubenswrapper[4852]: I1201 20:22:21.447344 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mx887" event={"ID":"31c7a083-b879-47d4-ad43-67019e4a9ecf","Type":"ContainerDied","Data":"ee9613a8690b0e24a9b003b71f5d8f85710cd088eee11004c2292ae6cf3e55cf"} Dec 01 20:22:22 crc kubenswrapper[4852]: I1201 20:22:22.809811 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:22 crc kubenswrapper[4852]: I1201 20:22:22.921634 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-config-data\") pod \"31c7a083-b879-47d4-ad43-67019e4a9ecf\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " Dec 01 20:22:22 crc kubenswrapper[4852]: I1201 20:22:22.921745 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsnmj\" (UniqueName: \"kubernetes.io/projected/31c7a083-b879-47d4-ad43-67019e4a9ecf-kube-api-access-wsnmj\") pod \"31c7a083-b879-47d4-ad43-67019e4a9ecf\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " Dec 01 20:22:22 crc kubenswrapper[4852]: I1201 20:22:22.921838 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-combined-ca-bundle\") pod \"31c7a083-b879-47d4-ad43-67019e4a9ecf\" (UID: \"31c7a083-b879-47d4-ad43-67019e4a9ecf\") " Dec 01 20:22:22 crc kubenswrapper[4852]: I1201 20:22:22.938807 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31c7a083-b879-47d4-ad43-67019e4a9ecf-kube-api-access-wsnmj" (OuterVolumeSpecName: "kube-api-access-wsnmj") pod "31c7a083-b879-47d4-ad43-67019e4a9ecf" (UID: "31c7a083-b879-47d4-ad43-67019e4a9ecf"). InnerVolumeSpecName "kube-api-access-wsnmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:22 crc kubenswrapper[4852]: I1201 20:22:22.951873 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31c7a083-b879-47d4-ad43-67019e4a9ecf" (UID: "31c7a083-b879-47d4-ad43-67019e4a9ecf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:22 crc kubenswrapper[4852]: I1201 20:22:22.976946 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-config-data" (OuterVolumeSpecName: "config-data") pod "31c7a083-b879-47d4-ad43-67019e4a9ecf" (UID: "31c7a083-b879-47d4-ad43-67019e4a9ecf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.024017 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.024068 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsnmj\" (UniqueName: \"kubernetes.io/projected/31c7a083-b879-47d4-ad43-67019e4a9ecf-kube-api-access-wsnmj\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.024084 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31c7a083-b879-47d4-ad43-67019e4a9ecf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.467375 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mx887" event={"ID":"31c7a083-b879-47d4-ad43-67019e4a9ecf","Type":"ContainerDied","Data":"d65016bd17fb7703e1f2932b1877b86feffdc82c864a4f1407adde8422a40375"} Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.467760 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d65016bd17fb7703e1f2932b1877b86feffdc82c864a4f1407adde8422a40375" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.467628 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-mx887" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.751131 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76bb7864cf-bsxq6"] Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752049 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41f82f81-1abd-4196-9018-aec00a3d1b99" containerName="mariadb-database-create" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752068 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="41f82f81-1abd-4196-9018-aec00a3d1b99" containerName="mariadb-database-create" Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752083 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4c08895-1b79-4672-ab94-f7d53d17616a" containerName="mariadb-account-create-update" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752090 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4c08895-1b79-4672-ab94-f7d53d17616a" containerName="mariadb-account-create-update" Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752113 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa141992-beaa-4f60-993a-1aa6b520aff5" containerName="mariadb-account-create-update" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752120 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa141992-beaa-4f60-993a-1aa6b520aff5" containerName="mariadb-account-create-update" Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752136 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecb2597e-20f0-472e-9a8e-100301abb115" containerName="mariadb-database-create" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752142 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecb2597e-20f0-472e-9a8e-100301abb115" containerName="mariadb-database-create" Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752150 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e2d3b9c-bafa-4691-ac33-7852955ea9ad" containerName="init" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752156 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e2d3b9c-bafa-4691-ac33-7852955ea9ad" containerName="init" Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752170 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8561c866-ee4d-4939-86b9-fde9b1200c41" containerName="dnsmasq-dns" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752175 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8561c866-ee4d-4939-86b9-fde9b1200c41" containerName="dnsmasq-dns" Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752186 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8561c866-ee4d-4939-86b9-fde9b1200c41" containerName="init" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752193 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8561c866-ee4d-4939-86b9-fde9b1200c41" containerName="init" Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752204 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f50c4872-2332-4f7d-9df1-99b9b1b639a6" containerName="mariadb-database-create" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752211 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f50c4872-2332-4f7d-9df1-99b9b1b639a6" containerName="mariadb-database-create" Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752225 4852 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="dcc105dd-813a-4de9-a0a5-b76287276b3b" containerName="mariadb-account-create-update" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752241 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcc105dd-813a-4de9-a0a5-b76287276b3b" containerName="mariadb-account-create-update" Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752258 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e2d3b9c-bafa-4691-ac33-7852955ea9ad" containerName="dnsmasq-dns" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752266 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e2d3b9c-bafa-4691-ac33-7852955ea9ad" containerName="dnsmasq-dns" Dec 01 20:22:23 crc kubenswrapper[4852]: E1201 20:22:23.752278 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31c7a083-b879-47d4-ad43-67019e4a9ecf" containerName="keystone-db-sync" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752300 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="31c7a083-b879-47d4-ad43-67019e4a9ecf" containerName="keystone-db-sync" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752515 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4c08895-1b79-4672-ab94-f7d53d17616a" containerName="mariadb-account-create-update" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752535 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecb2597e-20f0-472e-9a8e-100301abb115" containerName="mariadb-database-create" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752549 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f50c4872-2332-4f7d-9df1-99b9b1b639a6" containerName="mariadb-database-create" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752556 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa141992-beaa-4f60-993a-1aa6b520aff5" containerName="mariadb-account-create-update" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752564 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="41f82f81-1abd-4196-9018-aec00a3d1b99" containerName="mariadb-database-create" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752574 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e2d3b9c-bafa-4691-ac33-7852955ea9ad" containerName="dnsmasq-dns" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752584 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="31c7a083-b879-47d4-ad43-67019e4a9ecf" containerName="keystone-db-sync" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752595 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcc105dd-813a-4de9-a0a5-b76287276b3b" containerName="mariadb-account-create-update" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.752607 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8561c866-ee4d-4939-86b9-fde9b1200c41" containerName="dnsmasq-dns" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.753766 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.770885 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-tcm74"] Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.772220 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.779241 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.779541 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.779658 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.783246 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.783729 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-tqglv" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.783897 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76bb7864cf-bsxq6"] Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.819815 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tcm74"] Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.847411 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kchv6\" (UniqueName: \"kubernetes.io/projected/393700ee-7231-4aed-ab49-1d7a0bbbda87-kube-api-access-kchv6\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.847986 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-config\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.848153 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-swift-storage-0\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.848372 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-sb\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.848648 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-svc\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.848811 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-nb\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: 
\"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950515 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kchv6\" (UniqueName: \"kubernetes.io/projected/393700ee-7231-4aed-ab49-1d7a0bbbda87-kube-api-access-kchv6\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950580 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-config\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950611 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-swift-storage-0\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950661 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjbbp\" (UniqueName: \"kubernetes.io/projected/b92d0010-4851-4c54-ae36-42314f5ddb92-kube-api-access-xjbbp\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950701 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-sb\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950731 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-credential-keys\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950764 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-scripts\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950787 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-svc\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950807 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-combined-ca-bundle\") pod \"keystone-bootstrap-tcm74\" (UID: 
\"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950830 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-nb\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950847 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-config-data\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.950867 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-fernet-keys\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.951989 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-config\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.952022 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-svc\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.952050 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-swift-storage-0\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.952728 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-nb\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.953557 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-sb\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.964708 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6bd8d6ff69-cmc78"] Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.972135 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.990548 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-m5jwp" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.990620 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.990739 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 01 20:22:23 crc kubenswrapper[4852]: I1201 20:22:23.990801 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.019544 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-k8mp8"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.021241 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.027033 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.027288 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nnk52" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.027445 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.028304 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6bd8d6ff69-cmc78"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.036823 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kchv6\" (UniqueName: \"kubernetes.io/projected/393700ee-7231-4aed-ab49-1d7a0bbbda87-kube-api-access-kchv6\") pod \"dnsmasq-dns-76bb7864cf-bsxq6\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.056086 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-credential-keys\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.056165 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-scripts\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.056206 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-combined-ca-bundle\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.056239 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-config-data\") pod \"keystone-bootstrap-tcm74\" (UID: 
\"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.056264 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-fernet-keys\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.056340 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjbbp\" (UniqueName: \"kubernetes.io/projected/b92d0010-4851-4c54-ae36-42314f5ddb92-kube-api-access-xjbbp\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.068619 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-credential-keys\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.069261 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-scripts\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.072315 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-config-data\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.089320 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-combined-ca-bundle\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.097975 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.098698 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-k8mp8"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.105187 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-fernet-keys\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.124375 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjbbp\" (UniqueName: \"kubernetes.io/projected/b92d0010-4851-4c54-ae36-42314f5ddb92-kube-api-access-xjbbp\") pod \"keystone-bootstrap-tcm74\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.159729 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t265b\" (UniqueName: \"kubernetes.io/projected/2ea8042f-95a3-43d9-a653-6c61fc239d8e-kube-api-access-t265b\") pod \"neutron-db-sync-k8mp8\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.159825 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cdd0646-f58e-47b0-b562-1db787dd489d-horizon-secret-key\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.159930 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-combined-ca-bundle\") pod \"neutron-db-sync-k8mp8\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.160006 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdd0646-f58e-47b0-b562-1db787dd489d-logs\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.160033 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhzjd\" (UniqueName: \"kubernetes.io/projected/8cdd0646-f58e-47b0-b562-1db787dd489d-kube-api-access-xhzjd\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.160075 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-scripts\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.160098 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-config\") pod \"neutron-db-sync-k8mp8\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.160140 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-config-data\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.214289 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-ml69l"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.215655 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.222789 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.223048 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-cqj4j" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.223153 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.251507 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-ml69l"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.261810 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-combined-ca-bundle\") pod \"neutron-db-sync-k8mp8\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.261884 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdd0646-f58e-47b0-b562-1db787dd489d-logs\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.261911 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhzjd\" (UniqueName: \"kubernetes.io/projected/8cdd0646-f58e-47b0-b562-1db787dd489d-kube-api-access-xhzjd\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.261939 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-scripts\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.261953 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-config\") pod \"neutron-db-sync-k8mp8\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.261983 4852 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-config-data\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.262024 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t265b\" (UniqueName: \"kubernetes.io/projected/2ea8042f-95a3-43d9-a653-6c61fc239d8e-kube-api-access-t265b\") pod \"neutron-db-sync-k8mp8\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.262048 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cdd0646-f58e-47b0-b562-1db787dd489d-horizon-secret-key\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.266794 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-748b5c8cbf-8ph4c"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.267393 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-scripts\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.267716 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdd0646-f58e-47b0-b562-1db787dd489d-logs\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.282072 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-fh7zl"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.282278 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.283956 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-config-data\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.287415 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.291946 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.292070 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.299828 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cdd0646-f58e-47b0-b562-1db787dd489d-horizon-secret-key\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.299981 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rcdz5" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.300108 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-748b5c8cbf-8ph4c"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.308177 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-fh7zl"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.313547 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-combined-ca-bundle\") pod \"neutron-db-sync-k8mp8\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.313578 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-config\") pod \"neutron-db-sync-k8mp8\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.351560 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhzjd\" (UniqueName: \"kubernetes.io/projected/8cdd0646-f58e-47b0-b562-1db787dd489d-kube-api-access-xhzjd\") pod \"horizon-6bd8d6ff69-cmc78\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.353355 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t265b\" (UniqueName: \"kubernetes.io/projected/2ea8042f-95a3-43d9-a653-6c61fc239d8e-kube-api-access-t265b\") pod \"neutron-db-sync-k8mp8\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.413566 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.416686 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b531141a-eca1-4f9f-a67a-68d48d92add9-etc-machine-id\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.432501 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxmkk\" (UniqueName: \"kubernetes.io/projected/57a8115d-5012-49be-8a94-ca231d1d4a54-kube-api-access-xxmkk\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.432591 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-config-data\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.432670 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-scripts\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.432741 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-config-data\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.432826 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-scripts\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.432889 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57a8115d-5012-49be-8a94-ca231d1d4a54-logs\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.432938 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/57a8115d-5012-49be-8a94-ca231d1d4a54-horizon-secret-key\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.433054 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-db-sync-config-data\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") 
" pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.433101 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-762k9\" (UniqueName: \"kubernetes.io/projected/b531141a-eca1-4f9f-a67a-68d48d92add9-kube-api-access-762k9\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.433258 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-combined-ca-bundle\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.453326 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76bb7864cf-bsxq6"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540364 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-scripts\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540412 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-scripts\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540443 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-config-data\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540469 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-config-data\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540493 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdsc9\" (UniqueName: \"kubernetes.io/projected/54be94f0-c30e-4a21-9a22-a055b5e6154f-kube-api-access-sdsc9\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540514 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-scripts\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540528 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54be94f0-c30e-4a21-9a22-a055b5e6154f-logs\") pod 
\"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540555 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57a8115d-5012-49be-8a94-ca231d1d4a54-logs\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540573 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/57a8115d-5012-49be-8a94-ca231d1d4a54-horizon-secret-key\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540605 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-db-sync-config-data\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540623 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-762k9\" (UniqueName: \"kubernetes.io/projected/b531141a-eca1-4f9f-a67a-68d48d92add9-kube-api-access-762k9\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540663 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-combined-ca-bundle\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540686 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b531141a-eca1-4f9f-a67a-68d48d92add9-etc-machine-id\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540714 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxmkk\" (UniqueName: \"kubernetes.io/projected/57a8115d-5012-49be-8a94-ca231d1d4a54-kube-api-access-xxmkk\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540735 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-config-data\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.540755 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-combined-ca-bundle\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " 
pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.541926 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57a8115d-5012-49be-8a94-ca231d1d4a54-logs\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.544544 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-9mp8s"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.546086 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.546474 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-scripts\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.549526 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b531141a-eca1-4f9f-a67a-68d48d92add9-etc-machine-id\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.550555 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-config-data\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.551044 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.553310 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-config-data\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.554112 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/57a8115d-5012-49be-8a94-ca231d1d4a54-horizon-secret-key\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.556712 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ddn7d" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.556707 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-scripts\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.557042 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.557250 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-combined-ca-bundle\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.564734 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9mp8s"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.565737 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-db-sync-config-data\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.570662 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.573093 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.583417 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.583664 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.583760 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f5d458b55-7d2nd"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.585507 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.601025 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.624757 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-762k9\" (UniqueName: \"kubernetes.io/projected/b531141a-eca1-4f9f-a67a-68d48d92add9-kube-api-access-762k9\") pod \"cinder-db-sync-ml69l\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.627725 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.646022 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-combined-ca-bundle\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.646089 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-scripts\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.646127 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-config-data\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.646164 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdsc9\" (UniqueName: \"kubernetes.io/projected/54be94f0-c30e-4a21-9a22-a055b5e6154f-kube-api-access-sdsc9\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.646195 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54be94f0-c30e-4a21-9a22-a055b5e6154f-logs\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.646215 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxmkk\" (UniqueName: \"kubernetes.io/projected/57a8115d-5012-49be-8a94-ca231d1d4a54-kube-api-access-xxmkk\") pod \"horizon-748b5c8cbf-8ph4c\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.649562 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f5d458b55-7d2nd"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.650150 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54be94f0-c30e-4a21-9a22-a055b5e6154f-logs\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.654804 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-scripts\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.668583 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.670881 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.677948 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-combined-ca-bundle\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.678255 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-config-data\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.688719 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.689108 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.689273 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.697062 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.699290 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdsc9\" (UniqueName: \"kubernetes.io/projected/54be94f0-c30e-4a21-9a22-a055b5e6154f-kube-api-access-sdsc9\") pod \"placement-db-sync-fh7zl\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.715020 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-h82jn" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765141 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765269 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fsvs\" (UniqueName: \"kubernetes.io/projected/fa21b178-0e31-4c64-a0d7-59e622aa958e-kube-api-access-4fsvs\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765313 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-combined-ca-bundle\") pod \"barbican-db-sync-9mp8s\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") " pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765345 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-run-httpd\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765390 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpbqd\" (UniqueName: \"kubernetes.io/projected/553feaf1-d70f-46b8-89db-a7a141377b01-kube-api-access-qpbqd\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765496 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-swift-storage-0\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765577 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-svc\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765616 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-log-httpd\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765642 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-config-data\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765671 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-nb\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765699 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g86nr\" (UniqueName: \"kubernetes.io/projected/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-kube-api-access-g86nr\") pod \"barbican-db-sync-9mp8s\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") " pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765731 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-config\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765767 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-db-sync-config-data\") pod \"barbican-db-sync-9mp8s\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") " pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765795 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-sb\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765863 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-scripts\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.765890 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.811053 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 
20:22:24.817174 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fh7zl" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.867854 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-swift-storage-0\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.867906 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-config-data\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.867927 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.867953 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.867987 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-svc\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868017 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-log-httpd\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868034 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-config-data\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868051 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-logs\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868070 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-nb\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " 
pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868088 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g86nr\" (UniqueName: \"kubernetes.io/projected/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-kube-api-access-g86nr\") pod \"barbican-db-sync-9mp8s\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") " pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868110 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-config\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868132 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-db-sync-config-data\") pod \"barbican-db-sync-9mp8s\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") " pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868148 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-sb\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868180 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868200 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-scripts\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868217 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868240 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868260 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868291 4852 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-4fsvs\" (UniqueName: \"kubernetes.io/projected/fa21b178-0e31-4c64-a0d7-59e622aa958e-kube-api-access-4fsvs\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868308 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-scripts\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868331 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-combined-ca-bundle\") pod \"barbican-db-sync-9mp8s\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") " pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868352 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdv87\" (UniqueName: \"kubernetes.io/projected/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-kube-api-access-vdv87\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868368 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-run-httpd\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.868395 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpbqd\" (UniqueName: \"kubernetes.io/projected/553feaf1-d70f-46b8-89db-a7a141377b01-kube-api-access-qpbqd\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.869553 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-swift-storage-0\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.870184 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-svc\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.875536 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-log-httpd\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.877478 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-run-httpd\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.877604 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-nb\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.878251 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-sb\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.885288 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-config\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.887006 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-ml69l" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.888174 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.888428 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-scripts\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.889069 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-config-data\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.895245 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-db-sync-config-data\") pod \"barbican-db-sync-9mp8s\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") " pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.914295 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.914818 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-combined-ca-bundle\") pod \"barbican-db-sync-9mp8s\" (UID: 
\"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") " pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.922592 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g86nr\" (UniqueName: \"kubernetes.io/projected/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-kube-api-access-g86nr\") pod \"barbican-db-sync-9mp8s\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") " pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.934100 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fsvs\" (UniqueName: \"kubernetes.io/projected/fa21b178-0e31-4c64-a0d7-59e622aa958e-kube-api-access-4fsvs\") pod \"ceilometer-0\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.964430 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpbqd\" (UniqueName: \"kubernetes.io/projected/553feaf1-d70f-46b8-89db-a7a141377b01-kube-api-access-qpbqd\") pod \"dnsmasq-dns-5f5d458b55-7d2nd\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") " pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.972957 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.974832 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-config-data\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.974878 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.974901 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.974967 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-logs\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.975018 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.975052 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.975094 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-scripts\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.975122 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdv87\" (UniqueName: \"kubernetes.io/projected/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-kube-api-access-vdv87\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.976844 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-logs\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.982678 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:24 crc kubenswrapper[4852]: I1201 20:22:24.986299 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.036072 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.048207 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.049702 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.053789 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-config-data\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.054353 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-scripts\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.060421 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76bb7864cf-bsxq6"] Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.066151 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdv87\" (UniqueName: \"kubernetes.io/projected/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-kube-api-access-vdv87\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.083837 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.159674 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.164037 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.170506 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.171469 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.191784 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.215441 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-9mp8s" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.284223 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-logs\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.284500 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.285027 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcw4c\" (UniqueName: \"kubernetes.io/projected/80a61661-a693-437c-a752-7807e7875798-kube-api-access-mcw4c\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.285064 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.285116 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-config-data\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.285142 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-scripts\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.285167 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.285318 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.375850 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tcm74"] Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.380016 4852 util.go:30] "No sandbox for pod can be 
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.390159 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-config-data\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.390207 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-scripts\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.390228 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.390319 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.390374 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-logs\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.390395 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.390445 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcw4c\" (UniqueName: \"kubernetes.io/projected/80a61661-a693-437c-a752-7807e7875798-kube-api-access-mcw4c\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.390485 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.392585 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.393733 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.394118 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-logs\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.401054 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-scripts\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.404415 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.427755 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-config-data\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.457654 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.461185 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcw4c\" (UniqueName: \"kubernetes.io/projected/80a61661-a693-437c-a752-7807e7875798-kube-api-access-mcw4c\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.555098 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.562527 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" event={"ID":"393700ee-7231-4aed-ab49-1d7a0bbbda87","Type":"ContainerStarted","Data":"7ecafc34b327c8ad14f6685e6c829c8c2f788211690431fb498bf14a1667c27f"}
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.590154 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-k8mp8"]
Dec 01 20:22:25 crc kubenswrapper[4852]: W1201 20:22:25.604156 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb92d0010_4851_4c54_ae36_42314f5ddb92.slice/crio-ca02a43e7f4955a4e4e43d93264b3ac0972879d0acf3105e0518c907308179ed WatchSource:0}: Error finding container ca02a43e7f4955a4e4e43d93264b3ac0972879d0acf3105e0518c907308179ed: Status 404 returned error can't find the container with id ca02a43e7f4955a4e4e43d93264b3ac0972879d0acf3105e0518c907308179ed
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.630810 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 01 20:22:25 crc kubenswrapper[4852]: I1201 20:22:25.946403 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-fh7zl"]
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.302491 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-748b5c8cbf-8ph4c"]
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.317957 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-ml69l"]
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.367021 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f5d458b55-7d2nd"]
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.367070 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.368891 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6bd8d6ff69-cmc78"]
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.377402 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9mp8s"]
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.586445 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tcm74" event={"ID":"b92d0010-4851-4c54-ae36-42314f5ddb92","Type":"ContainerStarted","Data":"ca02a43e7f4955a4e4e43d93264b3ac0972879d0acf3105e0518c907308179ed"}
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.589102 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ml69l" event={"ID":"b531141a-eca1-4f9f-a67a-68d48d92add9","Type":"ContainerStarted","Data":"97bd0243d04ca5155ad14ef2a3874c0574111edc3898fab28f0768c649900826"}
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.592589 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fh7zl" event={"ID":"54be94f0-c30e-4a21-9a22-a055b5e6154f","Type":"ContainerStarted","Data":"8d6a5854318e1a8d581073faa7d96b16f773d4a29f4c2386e8e3b868947c3d7b"}
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.595123 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" event={"ID":"553feaf1-d70f-46b8-89db-a7a141377b01","Type":"ContainerStarted","Data":"e66480bc68d019161982ab58f4ebf340f6ddd216b7dd36cb5d95ea51df53ed33"}
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.599989 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa21b178-0e31-4c64-a0d7-59e622aa958e","Type":"ContainerStarted","Data":"7677c355aae1352fb2227378bf3d7c0d21a1e06b990d310e22e462a86137eddd"}
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.614316 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-748b5c8cbf-8ph4c" event={"ID":"57a8115d-5012-49be-8a94-ca231d1d4a54","Type":"ContainerStarted","Data":"c619bc7c8c73c26f741a0e95bc97dfb52d0614a629c5f070dd1703e32cb2c8d1"}
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.618846 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bd8d6ff69-cmc78" event={"ID":"8cdd0646-f58e-47b0-b562-1db787dd489d","Type":"ContainerStarted","Data":"ecebd211af3bb5460de2097a3ff37cc213c3ca7c3f6a3a406320fc19c250d11f"}
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.620763 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-k8mp8" event={"ID":"2ea8042f-95a3-43d9-a653-6c61fc239d8e","Type":"ContainerStarted","Data":"76f76f412af7a6b5cebe8d3a920a2347e0a86749932c1716bca18cf015e99287"}
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.632942 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9mp8s" event={"ID":"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58","Type":"ContainerStarted","Data":"5695a82c9f465ed3f5c6ec4a49cae3e05a45f1db5687e8d26e19e3f986371608"}
Dec 01 20:22:26 crc kubenswrapper[4852]: W1201 20:22:26.645851 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55ba3967_e137_4f35_a4ab_c5dd092dd4c9.slice/crio-bf564c97186720b386be0397b2e8c38ae2061285e8e670b16e8bbfe23fbb71e9 WatchSource:0}: Error finding container bf564c97186720b386be0397b2e8c38ae2061285e8e670b16e8bbfe23fbb71e9: Status 404 returned error can't find the container with id bf564c97186720b386be0397b2e8c38ae2061285e8e670b16e8bbfe23fbb71e9
Dec 01 20:22:26 crc kubenswrapper[4852]: I1201 20:22:26.647966 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.120072 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6bd8d6ff69-cmc78"]
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.154086 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-768f876787-6snnf"]
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.180152 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-768f876787-6snnf"
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.252993 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.253739 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-scripts\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf"
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.253920 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdg8d\" (UniqueName: \"kubernetes.io/projected/c0d059ae-91d8-40a7-862b-7204dfc0b420-kube-api-access-cdg8d\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf"
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.254171 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0d059ae-91d8-40a7-862b-7204dfc0b420-horizon-secret-key\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf"
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.254497 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-config-data\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf"
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.254538 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0d059ae-91d8-40a7-862b-7204dfc0b420-logs\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf"
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.305320 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.335490 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-768f876787-6snnf"]
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.356722 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-scripts\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf"
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.356806 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdg8d\" (UniqueName: \"kubernetes.io/projected/c0d059ae-91d8-40a7-862b-7204dfc0b420-kube-api-access-cdg8d\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf"
Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.356833 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0d059ae-91d8-40a7-862b-7204dfc0b420-horizon-secret-key\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf"
\"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf" Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.356918 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-config-data\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf" Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.356938 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0d059ae-91d8-40a7-862b-7204dfc0b420-logs\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf" Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.357484 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0d059ae-91d8-40a7-862b-7204dfc0b420-logs\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf" Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.364758 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-config-data\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf" Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.365402 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-scripts\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf" Dec 01 20:22:27 crc kubenswrapper[4852]: W1201 20:22:27.382877 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80a61661_a693_437c_a752_7807e7875798.slice/crio-0311217f48f634aa58c85f5dd1d5806c102db6a2506b2eef3b26cfa6930cf8d5 WatchSource:0}: Error finding container 0311217f48f634aa58c85f5dd1d5806c102db6a2506b2eef3b26cfa6930cf8d5: Status 404 returned error can't find the container with id 0311217f48f634aa58c85f5dd1d5806c102db6a2506b2eef3b26cfa6930cf8d5 Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.384940 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0d059ae-91d8-40a7-862b-7204dfc0b420-horizon-secret-key\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " pod="openstack/horizon-768f876787-6snnf" Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.423832 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.444374 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.453158 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdg8d\" (UniqueName: \"kubernetes.io/projected/c0d059ae-91d8-40a7-862b-7204dfc0b420-kube-api-access-cdg8d\") pod \"horizon-768f876787-6snnf\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " 
pod="openstack/horizon-768f876787-6snnf" Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.519981 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-768f876787-6snnf" Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.718306 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"55ba3967-e137-4f35-a4ab-c5dd092dd4c9","Type":"ContainerStarted","Data":"bf564c97186720b386be0397b2e8c38ae2061285e8e670b16e8bbfe23fbb71e9"} Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.753523 4852 generic.go:334] "Generic (PLEG): container finished" podID="393700ee-7231-4aed-ab49-1d7a0bbbda87" containerID="70bcba9bd6817b4ba6869c76b2040c6b88d803976eef018ac710599f365314cc" exitCode=0 Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.753700 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" event={"ID":"393700ee-7231-4aed-ab49-1d7a0bbbda87","Type":"ContainerDied","Data":"70bcba9bd6817b4ba6869c76b2040c6b88d803976eef018ac710599f365314cc"} Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.787143 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tcm74" event={"ID":"b92d0010-4851-4c54-ae36-42314f5ddb92","Type":"ContainerStarted","Data":"708bc0d25bab9090fcc2fea698be48d59f3abb410de4a18ae9633f7a71c6f6d0"} Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.806469 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-k8mp8" event={"ID":"2ea8042f-95a3-43d9-a653-6c61fc239d8e","Type":"ContainerStarted","Data":"a59c2abd917cdc0571dc449618c923c4c354ac5e03ae5d0d1080f93684d9de74"} Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.850532 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"80a61661-a693-437c-a752-7807e7875798","Type":"ContainerStarted","Data":"0311217f48f634aa58c85f5dd1d5806c102db6a2506b2eef3b26cfa6930cf8d5"} Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.861396 4852 generic.go:334] "Generic (PLEG): container finished" podID="553feaf1-d70f-46b8-89db-a7a141377b01" containerID="cd3cd2fe4272140c28b9d4dbc5c83e3f656d8dfc26b71ddf8a255959aab5aa06" exitCode=0 Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.861546 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" event={"ID":"553feaf1-d70f-46b8-89db-a7a141377b01","Type":"ContainerDied","Data":"cd3cd2fe4272140c28b9d4dbc5c83e3f656d8dfc26b71ddf8a255959aab5aa06"} Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.874312 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-tcm74" podStartSLOduration=4.874277541 podStartE2EDuration="4.874277541s" podCreationTimestamp="2025-12-01 20:22:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:27.857744148 +0000 UTC m=+1067.784825565" watchObservedRunningTime="2025-12-01 20:22:27.874277541 +0000 UTC m=+1067.801358958" Dec 01 20:22:27 crc kubenswrapper[4852]: I1201 20:22:27.897660 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-k8mp8" podStartSLOduration=4.897634239 podStartE2EDuration="4.897634239s" podCreationTimestamp="2025-12-01 20:22:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:27.886314972 +0000 UTC m=+1067.813396389" watchObservedRunningTime="2025-12-01 20:22:27.897634239 +0000 UTC m=+1067.824715656" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.473021 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-768f876787-6snnf"] Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.506063 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:28 crc kubenswrapper[4852]: W1201 20:22:28.508992 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0d059ae_91d8_40a7_862b_7204dfc0b420.slice/crio-49ee3e596d6feeff5317311e37747ad295d4490e3ebe12bd74e6692911aeed44 WatchSource:0}: Error finding container 49ee3e596d6feeff5317311e37747ad295d4490e3ebe12bd74e6692911aeed44: Status 404 returned error can't find the container with id 49ee3e596d6feeff5317311e37747ad295d4490e3ebe12bd74e6692911aeed44 Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.705271 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-config\") pod \"393700ee-7231-4aed-ab49-1d7a0bbbda87\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.705389 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kchv6\" (UniqueName: \"kubernetes.io/projected/393700ee-7231-4aed-ab49-1d7a0bbbda87-kube-api-access-kchv6\") pod \"393700ee-7231-4aed-ab49-1d7a0bbbda87\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.705487 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-nb\") pod \"393700ee-7231-4aed-ab49-1d7a0bbbda87\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.705540 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-sb\") pod \"393700ee-7231-4aed-ab49-1d7a0bbbda87\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.705647 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-swift-storage-0\") pod \"393700ee-7231-4aed-ab49-1d7a0bbbda87\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.705721 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-svc\") pod \"393700ee-7231-4aed-ab49-1d7a0bbbda87\" (UID: \"393700ee-7231-4aed-ab49-1d7a0bbbda87\") " Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.714807 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/393700ee-7231-4aed-ab49-1d7a0bbbda87-kube-api-access-kchv6" (OuterVolumeSpecName: "kube-api-access-kchv6") pod "393700ee-7231-4aed-ab49-1d7a0bbbda87" (UID: 
"393700ee-7231-4aed-ab49-1d7a0bbbda87"). InnerVolumeSpecName "kube-api-access-kchv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.741312 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "393700ee-7231-4aed-ab49-1d7a0bbbda87" (UID: "393700ee-7231-4aed-ab49-1d7a0bbbda87"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.751715 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "393700ee-7231-4aed-ab49-1d7a0bbbda87" (UID: "393700ee-7231-4aed-ab49-1d7a0bbbda87"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.753124 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-config" (OuterVolumeSpecName: "config") pod "393700ee-7231-4aed-ab49-1d7a0bbbda87" (UID: "393700ee-7231-4aed-ab49-1d7a0bbbda87"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.755953 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "393700ee-7231-4aed-ab49-1d7a0bbbda87" (UID: "393700ee-7231-4aed-ab49-1d7a0bbbda87"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.778661 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "393700ee-7231-4aed-ab49-1d7a0bbbda87" (UID: "393700ee-7231-4aed-ab49-1d7a0bbbda87"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.808746 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.808807 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kchv6\" (UniqueName: \"kubernetes.io/projected/393700ee-7231-4aed-ab49-1d7a0bbbda87-kube-api-access-kchv6\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.808827 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.808840 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.808852 4852 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.808863 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/393700ee-7231-4aed-ab49-1d7a0bbbda87-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.877097 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-768f876787-6snnf" event={"ID":"c0d059ae-91d8-40a7-862b-7204dfc0b420","Type":"ContainerStarted","Data":"49ee3e596d6feeff5317311e37747ad295d4490e3ebe12bd74e6692911aeed44"} Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.879495 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"55ba3967-e137-4f35-a4ab-c5dd092dd4c9","Type":"ContainerStarted","Data":"1224d2952cc87952ab77dd6334a2cc42d78db20f149919a26d646a5e205cc06c"} Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.883143 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.891402 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76bb7864cf-bsxq6" event={"ID":"393700ee-7231-4aed-ab49-1d7a0bbbda87","Type":"ContainerDied","Data":"7ecafc34b327c8ad14f6685e6c829c8c2f788211690431fb498bf14a1667c27f"} Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.891523 4852 scope.go:117] "RemoveContainer" containerID="70bcba9bd6817b4ba6869c76b2040c6b88d803976eef018ac710599f365314cc" Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.965516 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76bb7864cf-bsxq6"] Dec 01 20:22:28 crc kubenswrapper[4852]: I1201 20:22:28.977669 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76bb7864cf-bsxq6"] Dec 01 20:22:30 crc kubenswrapper[4852]: I1201 20:22:30.354293 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="393700ee-7231-4aed-ab49-1d7a0bbbda87" path="/var/lib/kubelet/pods/393700ee-7231-4aed-ab49-1d7a0bbbda87/volumes" Dec 01 20:22:30 crc kubenswrapper[4852]: I1201 20:22:30.942846 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"80a61661-a693-437c-a752-7807e7875798","Type":"ContainerStarted","Data":"527408c5ce0bc6d7ccdf8972392efd9fd18b13e5b12687f10cd2173a391268db"} Dec 01 20:22:30 crc kubenswrapper[4852]: I1201 20:22:30.954527 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" event={"ID":"553feaf1-d70f-46b8-89db-a7a141377b01","Type":"ContainerStarted","Data":"1966432c165e2d879fa973f536e9b94fe0f1fa19b654677eccc855f2b3d20c3a"} Dec 01 20:22:30 crc kubenswrapper[4852]: I1201 20:22:30.955169 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:30 crc kubenswrapper[4852]: I1201 20:22:30.980752 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" podStartSLOduration=6.980728461 podStartE2EDuration="6.980728461s" podCreationTimestamp="2025-12-01 20:22:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:30.974318058 +0000 UTC m=+1070.901399485" watchObservedRunningTime="2025-12-01 20:22:30.980728461 +0000 UTC m=+1070.907809878" Dec 01 20:22:31 crc kubenswrapper[4852]: I1201 20:22:31.979696 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"55ba3967-e137-4f35-a4ab-c5dd092dd4c9","Type":"ContainerStarted","Data":"fa1052cd1acdc8c0f0cf40722e1fcbdcb221869f50add7c4db2335a447ebd001"} Dec 01 20:22:31 crc kubenswrapper[4852]: I1201 20:22:31.979847 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" containerName="glance-log" containerID="cri-o://1224d2952cc87952ab77dd6334a2cc42d78db20f149919a26d646a5e205cc06c" gracePeriod=30 Dec 01 20:22:31 crc kubenswrapper[4852]: I1201 20:22:31.979922 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" containerName="glance-httpd" containerID="cri-o://fa1052cd1acdc8c0f0cf40722e1fcbdcb221869f50add7c4db2335a447ebd001" gracePeriod=30 
Dec 01 20:22:32 crc kubenswrapper[4852]: I1201 20:22:32.017782 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.017748865 podStartE2EDuration="8.017748865s" podCreationTimestamp="2025-12-01 20:22:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:31.997072382 +0000 UTC m=+1071.924153799" watchObservedRunningTime="2025-12-01 20:22:32.017748865 +0000 UTC m=+1071.944830282"
Dec 01 20:22:32 crc kubenswrapper[4852]: I1201 20:22:32.992339 4852 generic.go:334] "Generic (PLEG): container finished" podID="b92d0010-4851-4c54-ae36-42314f5ddb92" containerID="708bc0d25bab9090fcc2fea698be48d59f3abb410de4a18ae9633f7a71c6f6d0" exitCode=0
Dec 01 20:22:32 crc kubenswrapper[4852]: I1201 20:22:32.992464 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tcm74" event={"ID":"b92d0010-4851-4c54-ae36-42314f5ddb92","Type":"ContainerDied","Data":"708bc0d25bab9090fcc2fea698be48d59f3abb410de4a18ae9633f7a71c6f6d0"}
Dec 01 20:22:32 crc kubenswrapper[4852]: I1201 20:22:32.999717 4852 generic.go:334] "Generic (PLEG): container finished" podID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" containerID="fa1052cd1acdc8c0f0cf40722e1fcbdcb221869f50add7c4db2335a447ebd001" exitCode=0
Dec 01 20:22:32 crc kubenswrapper[4852]: I1201 20:22:32.999750 4852 generic.go:334] "Generic (PLEG): container finished" podID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" containerID="1224d2952cc87952ab77dd6334a2cc42d78db20f149919a26d646a5e205cc06c" exitCode=143
Dec 01 20:22:32 crc kubenswrapper[4852]: I1201 20:22:32.999775 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"55ba3967-e137-4f35-a4ab-c5dd092dd4c9","Type":"ContainerDied","Data":"fa1052cd1acdc8c0f0cf40722e1fcbdcb221869f50add7c4db2335a447ebd001"}
Dec 01 20:22:32 crc kubenswrapper[4852]: I1201 20:22:32.999802 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"55ba3967-e137-4f35-a4ab-c5dd092dd4c9","Type":"ContainerDied","Data":"1224d2952cc87952ab77dd6334a2cc42d78db20f149919a26d646a5e205cc06c"}
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.865691 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-748b5c8cbf-8ph4c"]
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.905918 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-d7844c8bb-bfdj8"]
Dec 01 20:22:33 crc kubenswrapper[4852]: E1201 20:22:33.908594 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="393700ee-7231-4aed-ab49-1d7a0bbbda87" containerName="init"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.908625 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="393700ee-7231-4aed-ab49-1d7a0bbbda87" containerName="init"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.908874 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="393700ee-7231-4aed-ab49-1d7a0bbbda87" containerName="init"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.910033 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.916615 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.920179 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d7844c8bb-bfdj8"]
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.965974 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktzcv\" (UniqueName: \"kubernetes.io/projected/734ce01c-357e-438b-bfe6-39fa2044dc13-kube-api-access-ktzcv\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.966047 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-config-data\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.966073 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-scripts\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.966097 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-secret-key\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.966187 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-combined-ca-bundle\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.966229 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-tls-certs\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.966264 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/734ce01c-357e-438b-bfe6-39fa2044dc13-logs\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:33 crc kubenswrapper[4852]: I1201 20:22:33.996433 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-768f876787-6snnf"]
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.041357 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-dfd6f888-xxwbg"]
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.043268 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-dfd6f888-xxwbg"
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.056095 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dfd6f888-xxwbg"]
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068176 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/160a77b2-5ec6-4223-b939-8e90b339f530-logs\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg"
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068236 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/160a77b2-5ec6-4223-b939-8e90b339f530-config-data\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg"
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068270 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktzcv\" (UniqueName: \"kubernetes.io/projected/734ce01c-357e-438b-bfe6-39fa2044dc13-kube-api-access-ktzcv\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068297 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-config-data\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068321 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-scripts\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068340 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-secret-key\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8"
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068363 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/160a77b2-5ec6-4223-b939-8e90b339f530-combined-ca-bundle\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg"
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068397 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/160a77b2-5ec6-4223-b939-8e90b339f530-scripts\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg"
Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068425 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/160a77b2-5ec6-4223-b939-8e90b339f530-horizon-tls-certs\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068464 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-combined-ca-bundle\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068494 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-tls-certs\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068516 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s84d5\" (UniqueName: \"kubernetes.io/projected/160a77b2-5ec6-4223-b939-8e90b339f530-kube-api-access-s84d5\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068549 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/160a77b2-5ec6-4223-b939-8e90b339f530-horizon-secret-key\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.068569 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/734ce01c-357e-438b-bfe6-39fa2044dc13-logs\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.069962 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-config-data\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.073840 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/734ce01c-357e-438b-bfe6-39fa2044dc13-logs\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.074712 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-scripts\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.083622 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-secret-key\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " 
pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.085630 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-combined-ca-bundle\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.144605 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-tls-certs\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.152623 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktzcv\" (UniqueName: \"kubernetes.io/projected/734ce01c-357e-438b-bfe6-39fa2044dc13-kube-api-access-ktzcv\") pod \"horizon-d7844c8bb-bfdj8\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.170557 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/160a77b2-5ec6-4223-b939-8e90b339f530-config-data\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.170672 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/160a77b2-5ec6-4223-b939-8e90b339f530-combined-ca-bundle\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.170977 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/160a77b2-5ec6-4223-b939-8e90b339f530-scripts\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.174132 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/160a77b2-5ec6-4223-b939-8e90b339f530-config-data\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.175245 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/160a77b2-5ec6-4223-b939-8e90b339f530-horizon-tls-certs\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.175418 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s84d5\" (UniqueName: \"kubernetes.io/projected/160a77b2-5ec6-4223-b939-8e90b339f530-kube-api-access-s84d5\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.175530 4852 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/160a77b2-5ec6-4223-b939-8e90b339f530-horizon-secret-key\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.175683 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/160a77b2-5ec6-4223-b939-8e90b339f530-logs\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.175951 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/160a77b2-5ec6-4223-b939-8e90b339f530-scripts\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.176130 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/160a77b2-5ec6-4223-b939-8e90b339f530-logs\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.178277 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/160a77b2-5ec6-4223-b939-8e90b339f530-combined-ca-bundle\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.180713 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/160a77b2-5ec6-4223-b939-8e90b339f530-horizon-secret-key\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.190191 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/160a77b2-5ec6-4223-b939-8e90b339f530-horizon-tls-certs\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.203255 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s84d5\" (UniqueName: \"kubernetes.io/projected/160a77b2-5ec6-4223-b939-8e90b339f530-kube-api-access-s84d5\") pod \"horizon-dfd6f888-xxwbg\" (UID: \"160a77b2-5ec6-4223-b939-8e90b339f530\") " pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.239831 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:22:34 crc kubenswrapper[4852]: I1201 20:22:34.368191 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:22:35 crc kubenswrapper[4852]: I1201 20:22:35.039690 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:22:35 crc kubenswrapper[4852]: I1201 20:22:35.122534 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b6bbf7467-z5crl"] Dec 01 20:22:35 crc kubenswrapper[4852]: I1201 20:22:35.122886 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" podUID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerName="dnsmasq-dns" containerID="cri-o://f452cda2ec576d4e0c3a7fc42330501f5d02a2af0de2d78ea32148fcb30aca98" gracePeriod=10 Dec 01 20:22:36 crc kubenswrapper[4852]: I1201 20:22:36.055891 4852 generic.go:334] "Generic (PLEG): container finished" podID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerID="f452cda2ec576d4e0c3a7fc42330501f5d02a2af0de2d78ea32148fcb30aca98" exitCode=0 Dec 01 20:22:36 crc kubenswrapper[4852]: I1201 20:22:36.055957 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" event={"ID":"41e5d9d4-837b-4407-8eee-4d6c44e1e20c","Type":"ContainerDied","Data":"f452cda2ec576d4e0c3a7fc42330501f5d02a2af0de2d78ea32148fcb30aca98"} Dec 01 20:22:38 crc kubenswrapper[4852]: I1201 20:22:38.010058 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" podUID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.124:5353: connect: connection refused" Dec 01 20:22:43 crc kubenswrapper[4852]: E1201 20:22:42.800070 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:2051e26a441f1ce22aeb8daa0137559d89bded994db8141c11dd580ae6d07a23" Dec 01 20:22:43 crc kubenswrapper[4852]: E1201 20:22:42.800990 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:2051e26a441f1ce22aeb8daa0137559d89bded994db8141c11dd580ae6d07a23,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n54bh569hffh88h5f5h688h684h584h686hc7h59fh55fh5d5h84h94h564h57fh7ch86h77h649h6fhd6h5d8h566h698h688hd5h67dh68bh697h586q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4fsvs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(fa21b178-0e31-4c64-a0d7-59e622aa958e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:22:43 crc kubenswrapper[4852]: E1201 20:22:42.901250 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon@sha256:4ed0fa186a77e467c019bdf196933b38d3ea35c46a8e19b5a6700c77580714b9" Dec 01 20:22:43 crc kubenswrapper[4852]: E1201 20:22:42.901671 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon@sha256:4ed0fa186a77e467c019bdf196933b38d3ea35c46a8e19b5a6700c77580714b9,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n55ch67h5cdhfch86h5b8h577h59bhbchd7h5d9h5d5h7bh677h59dh9ch4h65fh5d6h55dhc9h59fh77h694h594h6bh569h669h5b8h677hf4h698q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xxmkk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-748b5c8cbf-8ph4c_openstack(57a8115d-5012-49be-8a94-ca231d1d4a54): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:22:43 crc kubenswrapper[4852]: E1201 20:22:42.907230 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:4ed0fa186a77e467c019bdf196933b38d3ea35c46a8e19b5a6700c77580714b9\\\"\"]" pod="openstack/horizon-748b5c8cbf-8ph4c" podUID="57a8115d-5012-49be-8a94-ca231d1d4a54" Dec 01 20:22:43 crc kubenswrapper[4852]: I1201 20:22:43.010131 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" podUID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.124:5353: connect: connection refused" Dec 01 20:22:43 crc kubenswrapper[4852]: I1201 20:22:43.130702 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="80a61661-a693-437c-a752-7807e7875798" containerName="glance-log" containerID="cri-o://527408c5ce0bc6d7ccdf8972392efd9fd18b13e5b12687f10cd2173a391268db" gracePeriod=30 Dec 01 20:22:43 crc kubenswrapper[4852]: I1201 20:22:43.131042 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"80a61661-a693-437c-a752-7807e7875798","Type":"ContainerStarted","Data":"62fc2e6867cf2cdbefdb5687379aa7408afaafc9da417a686f338f6a58f912e2"} Dec 01 20:22:43 crc 
kubenswrapper[4852]: I1201 20:22:43.131391 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="80a61661-a693-437c-a752-7807e7875798" containerName="glance-httpd" containerID="cri-o://62fc2e6867cf2cdbefdb5687379aa7408afaafc9da417a686f338f6a58f912e2" gracePeriod=30 Dec 01 20:22:43 crc kubenswrapper[4852]: I1201 20:22:43.180002 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=19.179977398 podStartE2EDuration="19.179977398s" podCreationTimestamp="2025-12-01 20:22:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:43.174260547 +0000 UTC m=+1083.101341964" watchObservedRunningTime="2025-12-01 20:22:43.179977398 +0000 UTC m=+1083.107058825" Dec 01 20:22:44 crc kubenswrapper[4852]: E1201 20:22:44.068495 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:3a56b50437a0c9a9a7b30c10f5e43bbdb7d9a94b723c70d36f0b01ff545e00eb" Dec 01 20:22:44 crc kubenswrapper[4852]: E1201 20:22:44.068844 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:3a56b50437a0c9a9a7b30c10f5e43bbdb7d9a94b723c70d36f0b01ff545e00eb,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g86nr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-9mp8s_openstack(622dd636-8b05-4a9c-aa97-0fb5fd5d0c58): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:22:44 crc kubenswrapper[4852]: E1201 20:22:44.070215 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/barbican-db-sync-9mp8s" podUID="622dd636-8b05-4a9c-aa97-0fb5fd5d0c58" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.148514 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tcm74" event={"ID":"b92d0010-4851-4c54-ae36-42314f5ddb92","Type":"ContainerDied","Data":"ca02a43e7f4955a4e4e43d93264b3ac0972879d0acf3105e0518c907308179ed"} Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.148585 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca02a43e7f4955a4e4e43d93264b3ac0972879d0acf3105e0518c907308179ed" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.151686 4852 generic.go:334] "Generic (PLEG): container finished" podID="80a61661-a693-437c-a752-7807e7875798" containerID="62fc2e6867cf2cdbefdb5687379aa7408afaafc9da417a686f338f6a58f912e2" exitCode=0 Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.151725 4852 generic.go:334] "Generic (PLEG): container finished" podID="80a61661-a693-437c-a752-7807e7875798" containerID="527408c5ce0bc6d7ccdf8972392efd9fd18b13e5b12687f10cd2173a391268db" exitCode=143 Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.151794 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"80a61661-a693-437c-a752-7807e7875798","Type":"ContainerDied","Data":"62fc2e6867cf2cdbefdb5687379aa7408afaafc9da417a686f338f6a58f912e2"} Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.151867 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"80a61661-a693-437c-a752-7807e7875798","Type":"ContainerDied","Data":"527408c5ce0bc6d7ccdf8972392efd9fd18b13e5b12687f10cd2173a391268db"} Dec 01 20:22:44 crc kubenswrapper[4852]: E1201 20:22:44.155116 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:3a56b50437a0c9a9a7b30c10f5e43bbdb7d9a94b723c70d36f0b01ff545e00eb\\\"\"" pod="openstack/barbican-db-sync-9mp8s" podUID="622dd636-8b05-4a9c-aa97-0fb5fd5d0c58" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.272841 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.281016 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.290253 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.402915 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-config-data\") pod \"57a8115d-5012-49be-8a94-ca231d1d4a54\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.402984 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-public-tls-certs\") pod \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403019 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57a8115d-5012-49be-8a94-ca231d1d4a54-logs\") pod \"57a8115d-5012-49be-8a94-ca231d1d4a54\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403099 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403135 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-credential-keys\") pod \"b92d0010-4851-4c54-ae36-42314f5ddb92\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403196 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-httpd-run\") pod \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403217 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-fernet-keys\") pod \"b92d0010-4851-4c54-ae36-42314f5ddb92\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403234 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-combined-ca-bundle\") pod \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403275 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxmkk\" (UniqueName: \"kubernetes.io/projected/57a8115d-5012-49be-8a94-ca231d1d4a54-kube-api-access-xxmkk\") pod \"57a8115d-5012-49be-8a94-ca231d1d4a54\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403311 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/57a8115d-5012-49be-8a94-ca231d1d4a54-horizon-secret-key\") pod \"57a8115d-5012-49be-8a94-ca231d1d4a54\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " Dec 01 
20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403349 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjbbp\" (UniqueName: \"kubernetes.io/projected/b92d0010-4851-4c54-ae36-42314f5ddb92-kube-api-access-xjbbp\") pod \"b92d0010-4851-4c54-ae36-42314f5ddb92\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403385 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-scripts\") pod \"b92d0010-4851-4c54-ae36-42314f5ddb92\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403481 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-config-data\") pod \"b92d0010-4851-4c54-ae36-42314f5ddb92\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403519 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-scripts\") pod \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403510 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-config-data" (OuterVolumeSpecName: "config-data") pod "57a8115d-5012-49be-8a94-ca231d1d4a54" (UID: "57a8115d-5012-49be-8a94-ca231d1d4a54"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403596 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-combined-ca-bundle\") pod \"b92d0010-4851-4c54-ae36-42314f5ddb92\" (UID: \"b92d0010-4851-4c54-ae36-42314f5ddb92\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403651 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdv87\" (UniqueName: \"kubernetes.io/projected/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-kube-api-access-vdv87\") pod \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403677 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-scripts\") pod \"57a8115d-5012-49be-8a94-ca231d1d4a54\" (UID: \"57a8115d-5012-49be-8a94-ca231d1d4a54\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403707 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-logs\") pod \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\" (UID: \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403733 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-config-data\") pod \"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\" (UID: 
\"55ba3967-e137-4f35-a4ab-c5dd092dd4c9\") " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.403898 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a8115d-5012-49be-8a94-ca231d1d4a54-logs" (OuterVolumeSpecName: "logs") pod "57a8115d-5012-49be-8a94-ca231d1d4a54" (UID: "57a8115d-5012-49be-8a94-ca231d1d4a54"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.404299 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57a8115d-5012-49be-8a94-ca231d1d4a54-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.404318 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.413434 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "55ba3967-e137-4f35-a4ab-c5dd092dd4c9" (UID: "55ba3967-e137-4f35-a4ab-c5dd092dd4c9"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.414127 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-scripts" (OuterVolumeSpecName: "scripts") pod "57a8115d-5012-49be-8a94-ca231d1d4a54" (UID: "57a8115d-5012-49be-8a94-ca231d1d4a54"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.414531 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-logs" (OuterVolumeSpecName: "logs") pod "55ba3967-e137-4f35-a4ab-c5dd092dd4c9" (UID: "55ba3967-e137-4f35-a4ab-c5dd092dd4c9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.414722 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-kube-api-access-vdv87" (OuterVolumeSpecName: "kube-api-access-vdv87") pod "55ba3967-e137-4f35-a4ab-c5dd092dd4c9" (UID: "55ba3967-e137-4f35-a4ab-c5dd092dd4c9"). InnerVolumeSpecName "kube-api-access-vdv87". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.414988 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "55ba3967-e137-4f35-a4ab-c5dd092dd4c9" (UID: "55ba3967-e137-4f35-a4ab-c5dd092dd4c9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.418611 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-scripts" (OuterVolumeSpecName: "scripts") pod "55ba3967-e137-4f35-a4ab-c5dd092dd4c9" (UID: "55ba3967-e137-4f35-a4ab-c5dd092dd4c9"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.419422 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57a8115d-5012-49be-8a94-ca231d1d4a54-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "57a8115d-5012-49be-8a94-ca231d1d4a54" (UID: "57a8115d-5012-49be-8a94-ca231d1d4a54"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.419928 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b92d0010-4851-4c54-ae36-42314f5ddb92" (UID: "b92d0010-4851-4c54-ae36-42314f5ddb92"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.421380 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a8115d-5012-49be-8a94-ca231d1d4a54-kube-api-access-xxmkk" (OuterVolumeSpecName: "kube-api-access-xxmkk") pod "57a8115d-5012-49be-8a94-ca231d1d4a54" (UID: "57a8115d-5012-49be-8a94-ca231d1d4a54"). InnerVolumeSpecName "kube-api-access-xxmkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.421562 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b92d0010-4851-4c54-ae36-42314f5ddb92-kube-api-access-xjbbp" (OuterVolumeSpecName: "kube-api-access-xjbbp") pod "b92d0010-4851-4c54-ae36-42314f5ddb92" (UID: "b92d0010-4851-4c54-ae36-42314f5ddb92"). InnerVolumeSpecName "kube-api-access-xjbbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.423550 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-scripts" (OuterVolumeSpecName: "scripts") pod "b92d0010-4851-4c54-ae36-42314f5ddb92" (UID: "b92d0010-4851-4c54-ae36-42314f5ddb92"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.425319 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b92d0010-4851-4c54-ae36-42314f5ddb92" (UID: "b92d0010-4851-4c54-ae36-42314f5ddb92"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.456049 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "55ba3967-e137-4f35-a4ab-c5dd092dd4c9" (UID: "55ba3967-e137-4f35-a4ab-c5dd092dd4c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.462560 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-config-data" (OuterVolumeSpecName: "config-data") pod "b92d0010-4851-4c54-ae36-42314f5ddb92" (UID: "b92d0010-4851-4c54-ae36-42314f5ddb92"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.464606 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b92d0010-4851-4c54-ae36-42314f5ddb92" (UID: "b92d0010-4851-4c54-ae36-42314f5ddb92"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.483801 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "55ba3967-e137-4f35-a4ab-c5dd092dd4c9" (UID: "55ba3967-e137-4f35-a4ab-c5dd092dd4c9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.493646 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-config-data" (OuterVolumeSpecName: "config-data") pod "55ba3967-e137-4f35-a4ab-c5dd092dd4c9" (UID: "55ba3967-e137-4f35-a4ab-c5dd092dd4c9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506734 4852 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506796 4852 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506812 4852 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506827 4852 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506842 4852 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506856 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506870 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxmkk\" (UniqueName: \"kubernetes.io/projected/57a8115d-5012-49be-8a94-ca231d1d4a54-kube-api-access-xxmkk\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506887 4852 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/57a8115d-5012-49be-8a94-ca231d1d4a54-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 
20:22:44.506898 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjbbp\" (UniqueName: \"kubernetes.io/projected/b92d0010-4851-4c54-ae36-42314f5ddb92-kube-api-access-xjbbp\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506909 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506932 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506942 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506953 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92d0010-4851-4c54-ae36-42314f5ddb92-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506964 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdv87\" (UniqueName: \"kubernetes.io/projected/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-kube-api-access-vdv87\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506978 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/57a8115d-5012-49be-8a94-ca231d1d4a54-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.506988 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.507000 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55ba3967-e137-4f35-a4ab-c5dd092dd4c9-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.530960 4852 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 01 20:22:44 crc kubenswrapper[4852]: I1201 20:22:44.609112 4852 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.188357 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"55ba3967-e137-4f35-a4ab-c5dd092dd4c9","Type":"ContainerDied","Data":"bf564c97186720b386be0397b2e8c38ae2061285e8e670b16e8bbfe23fbb71e9"} Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.188861 4852 scope.go:117] "RemoveContainer" containerID="fa1052cd1acdc8c0f0cf40722e1fcbdcb221869f50add7c4db2335a447ebd001" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.189082 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.203780 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tcm74" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.206662 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-748b5c8cbf-8ph4c" event={"ID":"57a8115d-5012-49be-8a94-ca231d1d4a54","Type":"ContainerDied","Data":"c619bc7c8c73c26f741a0e95bc97dfb52d0614a629c5f070dd1703e32cb2c8d1"} Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.206814 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-748b5c8cbf-8ph4c" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.289957 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.297923 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.327254 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:22:45 crc kubenswrapper[4852]: E1201 20:22:45.327916 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" containerName="glance-log" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.327944 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" containerName="glance-log" Dec 01 20:22:45 crc kubenswrapper[4852]: E1201 20:22:45.327980 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" containerName="glance-httpd" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.327990 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" containerName="glance-httpd" Dec 01 20:22:45 crc kubenswrapper[4852]: E1201 20:22:45.328023 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b92d0010-4851-4c54-ae36-42314f5ddb92" containerName="keystone-bootstrap" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.328032 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b92d0010-4851-4c54-ae36-42314f5ddb92" containerName="keystone-bootstrap" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.328284 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" containerName="glance-httpd" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.328313 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" containerName="glance-log" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.328333 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="b92d0010-4851-4c54-ae36-42314f5ddb92" containerName="keystone-bootstrap" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.329725 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.339969 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.340164 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.379263 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.392829 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-748b5c8cbf-8ph4c"] Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.403106 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-748b5c8cbf-8ph4c"] Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.446328 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-tcm74"] Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.451549 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.451621 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.451662 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.451712 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-config-data\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.451734 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-logs\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.451760 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7trmh\" (UniqueName: \"kubernetes.io/projected/3d67c2f4-a127-4a7f-bb01-15543416188f-kube-api-access-7trmh\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: 
I1201 20:22:45.451786 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.451820 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-scripts\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.470435 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-tcm74"] Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.495841 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-6lqj8"] Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.497424 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.500646 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.500995 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.501064 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.501187 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.507264 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-tqglv" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.537841 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6lqj8"] Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.554162 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-config-data\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.554231 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-logs\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.554269 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7trmh\" (UniqueName: \"kubernetes.io/projected/3d67c2f4-a127-4a7f-bb01-15543416188f-kube-api-access-7trmh\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.554292 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.554339 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-scripts\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.554494 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.554530 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.554562 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.554916 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.555530 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-logs\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.557153 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.570634 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.576810 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.577595 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.578484 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-config-data\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.581812 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7trmh\" (UniqueName: \"kubernetes.io/projected/3d67c2f4-a127-4a7f-bb01-15543416188f-kube-api-access-7trmh\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.603021 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.656132 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-credential-keys\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.656186 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-scripts\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.656240 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-fernet-keys\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.656437 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-combined-ca-bundle\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.656532 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-config-data\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " 
pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.656703 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfqtb\" (UniqueName: \"kubernetes.io/projected/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-kube-api-access-dfqtb\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.694180 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.758409 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-combined-ca-bundle\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.758485 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-config-data\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.758570 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfqtb\" (UniqueName: \"kubernetes.io/projected/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-kube-api-access-dfqtb\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.758651 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-credential-keys\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.758673 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-scripts\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.758733 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-fernet-keys\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.763477 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-credential-keys\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.765486 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-config-data\") pod \"keystone-bootstrap-6lqj8\" 
(UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.768476 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-scripts\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.768488 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-combined-ca-bundle\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.774365 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-fernet-keys\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.778906 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfqtb\" (UniqueName: \"kubernetes.io/projected/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-kube-api-access-dfqtb\") pod \"keystone-bootstrap-6lqj8\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:45 crc kubenswrapper[4852]: I1201 20:22:45.824320 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:22:46 crc kubenswrapper[4852]: I1201 20:22:46.332863 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55ba3967-e137-4f35-a4ab-c5dd092dd4c9" path="/var/lib/kubelet/pods/55ba3967-e137-4f35-a4ab-c5dd092dd4c9/volumes" Dec 01 20:22:46 crc kubenswrapper[4852]: I1201 20:22:46.334338 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a8115d-5012-49be-8a94-ca231d1d4a54" path="/var/lib/kubelet/pods/57a8115d-5012-49be-8a94-ca231d1d4a54/volumes" Dec 01 20:22:46 crc kubenswrapper[4852]: I1201 20:22:46.334761 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b92d0010-4851-4c54-ae36-42314f5ddb92" path="/var/lib/kubelet/pods/b92d0010-4851-4c54-ae36-42314f5ddb92/volumes" Dec 01 20:22:50 crc kubenswrapper[4852]: I1201 20:22:50.229994 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:22:50 crc kubenswrapper[4852]: I1201 20:22:50.230888 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.292108 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"80a61661-a693-437c-a752-7807e7875798","Type":"ContainerDied","Data":"0311217f48f634aa58c85f5dd1d5806c102db6a2506b2eef3b26cfa6930cf8d5"} Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.292669 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0311217f48f634aa58c85f5dd1d5806c102db6a2506b2eef3b26cfa6930cf8d5" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.297136 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" event={"ID":"41e5d9d4-837b-4407-8eee-4d6c44e1e20c","Type":"ContainerDied","Data":"21abc31ca73b51516c46395d903d5a55533b0a56c6e984e0f0af0913d0d4e29d"} Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.297167 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21abc31ca73b51516c46395d903d5a55533b0a56c6e984e0f0af0913d0d4e29d" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.299556 4852 generic.go:334] "Generic (PLEG): container finished" podID="2ea8042f-95a3-43d9-a653-6c61fc239d8e" containerID="a59c2abd917cdc0571dc449618c923c4c354ac5e03ae5d0d1080f93684d9de74" exitCode=0 Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.299589 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-k8mp8" event={"ID":"2ea8042f-95a3-43d9-a653-6c61fc239d8e","Type":"ContainerDied","Data":"a59c2abd917cdc0571dc449618c923c4c354ac5e03ae5d0d1080f93684d9de74"} Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.373028 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.384775 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.511718 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"80a61661-a693-437c-a752-7807e7875798\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.511885 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcw4c\" (UniqueName: \"kubernetes.io/projected/80a61661-a693-437c-a752-7807e7875798-kube-api-access-mcw4c\") pod \"80a61661-a693-437c-a752-7807e7875798\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.511940 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-combined-ca-bundle\") pod \"80a61661-a693-437c-a752-7807e7875798\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.511982 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-sb\") pod \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.512040 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-logs\") pod \"80a61661-a693-437c-a752-7807e7875798\" (UID: 
\"80a61661-a693-437c-a752-7807e7875798\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.512856 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-logs" (OuterVolumeSpecName: "logs") pod "80a61661-a693-437c-a752-7807e7875798" (UID: "80a61661-a693-437c-a752-7807e7875798"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.512069 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-config\") pod \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.513175 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-scripts\") pod \"80a61661-a693-437c-a752-7807e7875798\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.513205 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-svc\") pod \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.513315 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-swift-storage-0\") pod \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.513344 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-internal-tls-certs\") pod \"80a61661-a693-437c-a752-7807e7875798\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.513402 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-nb\") pod \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.513440 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wztm\" (UniqueName: \"kubernetes.io/projected/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-kube-api-access-7wztm\") pod \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\" (UID: \"41e5d9d4-837b-4407-8eee-4d6c44e1e20c\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.513505 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-httpd-run\") pod \"80a61661-a693-437c-a752-7807e7875798\" (UID: \"80a61661-a693-437c-a752-7807e7875798\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.513575 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-config-data\") pod \"80a61661-a693-437c-a752-7807e7875798\" 
(UID: \"80a61661-a693-437c-a752-7807e7875798\") " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.514305 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.520238 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80a61661-a693-437c-a752-7807e7875798-kube-api-access-mcw4c" (OuterVolumeSpecName: "kube-api-access-mcw4c") pod "80a61661-a693-437c-a752-7807e7875798" (UID: "80a61661-a693-437c-a752-7807e7875798"). InnerVolumeSpecName "kube-api-access-mcw4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.520633 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "80a61661-a693-437c-a752-7807e7875798" (UID: "80a61661-a693-437c-a752-7807e7875798"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.520807 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-scripts" (OuterVolumeSpecName: "scripts") pod "80a61661-a693-437c-a752-7807e7875798" (UID: "80a61661-a693-437c-a752-7807e7875798"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.522513 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "80a61661-a693-437c-a752-7807e7875798" (UID: "80a61661-a693-437c-a752-7807e7875798"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.544920 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-kube-api-access-7wztm" (OuterVolumeSpecName: "kube-api-access-7wztm") pod "41e5d9d4-837b-4407-8eee-4d6c44e1e20c" (UID: "41e5d9d4-837b-4407-8eee-4d6c44e1e20c"). InnerVolumeSpecName "kube-api-access-7wztm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.572253 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "41e5d9d4-837b-4407-8eee-4d6c44e1e20c" (UID: "41e5d9d4-837b-4407-8eee-4d6c44e1e20c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.575443 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80a61661-a693-437c-a752-7807e7875798" (UID: "80a61661-a693-437c-a752-7807e7875798"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.579388 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-config-data" (OuterVolumeSpecName: "config-data") pod "80a61661-a693-437c-a752-7807e7875798" (UID: "80a61661-a693-437c-a752-7807e7875798"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.589189 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "41e5d9d4-837b-4407-8eee-4d6c44e1e20c" (UID: "41e5d9d4-837b-4407-8eee-4d6c44e1e20c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.590431 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "41e5d9d4-837b-4407-8eee-4d6c44e1e20c" (UID: "41e5d9d4-837b-4407-8eee-4d6c44e1e20c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.591599 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-config" (OuterVolumeSpecName: "config") pod "41e5d9d4-837b-4407-8eee-4d6c44e1e20c" (UID: "41e5d9d4-837b-4407-8eee-4d6c44e1e20c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.598582 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "80a61661-a693-437c-a752-7807e7875798" (UID: "80a61661-a693-437c-a752-7807e7875798"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.602687 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "41e5d9d4-837b-4407-8eee-4d6c44e1e20c" (UID: "41e5d9d4-837b-4407-8eee-4d6c44e1e20c"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616759 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wztm\" (UniqueName: \"kubernetes.io/projected/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-kube-api-access-7wztm\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616796 4852 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80a61661-a693-437c-a752-7807e7875798-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616807 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616849 4852 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616860 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcw4c\" (UniqueName: \"kubernetes.io/projected/80a61661-a693-437c-a752-7807e7875798-kube-api-access-mcw4c\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616870 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616878 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616891 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616900 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616908 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616918 4852 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616927 4852 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a61661-a693-437c-a752-7807e7875798-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.616940 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e5d9d4-837b-4407-8eee-4d6c44e1e20c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 
20:22:52.637218 4852 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 01 20:22:52 crc kubenswrapper[4852]: I1201 20:22:52.719108 4852 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.005506 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" podUID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.124:5353: i/o timeout" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.005652 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.308796 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b6bbf7467-z5crl" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.308961 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.375134 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b6bbf7467-z5crl"] Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.386707 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b6bbf7467-z5crl"] Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.409561 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:22:53 crc kubenswrapper[4852]: E1201 20:22:53.438868 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0" Dec 01 20:22:53 crc kubenswrapper[4852]: E1201 20:22:53.439077 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-762k9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-ml69l_openstack(b531141a-eca1-4f9f-a67a-68d48d92add9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.439435 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.447175 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:22:53 crc kubenswrapper[4852]: E1201 20:22:53.441166 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-ml69l" podUID="b531141a-eca1-4f9f-a67a-68d48d92add9" Dec 01 20:22:53 crc kubenswrapper[4852]: E1201 20:22:53.447790 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80a61661-a693-437c-a752-7807e7875798" containerName="glance-httpd" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.447815 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="80a61661-a693-437c-a752-7807e7875798" containerName="glance-httpd" Dec 01 20:22:53 crc kubenswrapper[4852]: E1201 20:22:53.447851 4852 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerName="init" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.447862 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerName="init" Dec 01 20:22:53 crc kubenswrapper[4852]: E1201 20:22:53.447894 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80a61661-a693-437c-a752-7807e7875798" containerName="glance-log" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.460425 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="80a61661-a693-437c-a752-7807e7875798" containerName="glance-log" Dec 01 20:22:53 crc kubenswrapper[4852]: E1201 20:22:53.460518 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerName="dnsmasq-dns" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.460528 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerName="dnsmasq-dns" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.460964 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="80a61661-a693-437c-a752-7807e7875798" containerName="glance-httpd" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.460984 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" containerName="dnsmasq-dns" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.461028 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="80a61661-a693-437c-a752-7807e7875798" containerName="glance-log" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.466009 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.466434 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.471786 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.471801 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.480563 4852 scope.go:117] "RemoveContainer" containerID="1224d2952cc87952ab77dd6334a2cc42d78db20f149919a26d646a5e205cc06c" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.640431 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzzz6\" (UniqueName: \"kubernetes.io/projected/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-kube-api-access-tzzz6\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.641025 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.641067 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.641103 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.641169 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-logs\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.641194 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.641218 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.641288 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.743247 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-logs\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.743318 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.743353 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.743430 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.743494 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzzz6\" (UniqueName: \"kubernetes.io/projected/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-kube-api-access-tzzz6\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.743523 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.743552 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.743579 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.743938 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-logs\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.744075 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.744364 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.753050 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.753246 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.753962 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.758132 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.762853 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzzz6\" (UniqueName: \"kubernetes.io/projected/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-kube-api-access-tzzz6\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.774256 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.795445 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.915968 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:53 crc kubenswrapper[4852]: I1201 20:22:53.939346 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d7844c8bb-bfdj8"] Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.029599 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dfd6f888-xxwbg"] Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.049588 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t265b\" (UniqueName: \"kubernetes.io/projected/2ea8042f-95a3-43d9-a653-6c61fc239d8e-kube-api-access-t265b\") pod \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.049666 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-combined-ca-bundle\") pod \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.049874 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-config\") pod \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\" (UID: \"2ea8042f-95a3-43d9-a653-6c61fc239d8e\") " Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.059006 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ea8042f-95a3-43d9-a653-6c61fc239d8e-kube-api-access-t265b" (OuterVolumeSpecName: "kube-api-access-t265b") pod "2ea8042f-95a3-43d9-a653-6c61fc239d8e" (UID: "2ea8042f-95a3-43d9-a653-6c61fc239d8e"). InnerVolumeSpecName "kube-api-access-t265b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.087076 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-config" (OuterVolumeSpecName: "config") pod "2ea8042f-95a3-43d9-a653-6c61fc239d8e" (UID: "2ea8042f-95a3-43d9-a653-6c61fc239d8e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.107075 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ea8042f-95a3-43d9-a653-6c61fc239d8e" (UID: "2ea8042f-95a3-43d9-a653-6c61fc239d8e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.152644 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t265b\" (UniqueName: \"kubernetes.io/projected/2ea8042f-95a3-43d9-a653-6c61fc239d8e-kube-api-access-t265b\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.152691 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.152704 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ea8042f-95a3-43d9-a653-6c61fc239d8e-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.336040 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.346745 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41e5d9d4-837b-4407-8eee-4d6c44e1e20c" path="/var/lib/kubelet/pods/41e5d9d4-837b-4407-8eee-4d6c44e1e20c/volumes" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.348528 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80a61661-a693-437c-a752-7807e7875798" path="/var/lib/kubelet/pods/80a61661-a693-437c-a752-7807e7875798/volumes" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.353006 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd6f888-xxwbg" event={"ID":"160a77b2-5ec6-4223-b939-8e90b339f530","Type":"ContainerStarted","Data":"cbc0faa9597893ce077c27926766c95aed6ff42e64f04f70b9ffa6fb8891e43b"} Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.353043 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-k8mp8" event={"ID":"2ea8042f-95a3-43d9-a653-6c61fc239d8e","Type":"ContainerDied","Data":"76f76f412af7a6b5cebe8d3a920a2347e0a86749932c1716bca18cf015e99287"} Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.353060 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76f76f412af7a6b5cebe8d3a920a2347e0a86749932c1716bca18cf015e99287" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.353074 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d7844c8bb-bfdj8" event={"ID":"734ce01c-357e-438b-bfe6-39fa2044dc13","Type":"ContainerStarted","Data":"3961654c8c5f939d8b0f779cf13ad5d3ce347e75c2f6cab4b671166bd0e4f8e1"} Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.357332 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bd8d6ff69-cmc78" event={"ID":"8cdd0646-f58e-47b0-b562-1db787dd489d","Type":"ContainerStarted","Data":"ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50"} Dec 01 20:22:54 crc kubenswrapper[4852]: E1201 20:22:54.359299 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0\\\"\"" pod="openstack/cinder-db-sync-ml69l" podUID="b531141a-eca1-4f9f-a67a-68d48d92add9" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.371421 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/keystone-bootstrap-6lqj8"] Dec 01 20:22:54 crc kubenswrapper[4852]: W1201 20:22:54.384688 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7feb09eb_34d4_4b20_9904_bcde9ec4a9db.slice/crio-b9c094817aea440017ccb88e2f320c67933379c6d146131ec406d56c1834f417 WatchSource:0}: Error finding container b9c094817aea440017ccb88e2f320c67933379c6d146131ec406d56c1834f417: Status 404 returned error can't find the container with id b9c094817aea440017ccb88e2f320c67933379c6d146131ec406d56c1834f417 Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.389264 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-fh7zl" podStartSLOduration=3.010307185 podStartE2EDuration="30.389215055s" podCreationTimestamp="2025-12-01 20:22:24 +0000 UTC" firstStartedPulling="2025-12-01 20:22:25.951029137 +0000 UTC m=+1065.878110544" lastFinishedPulling="2025-12-01 20:22:53.329937007 +0000 UTC m=+1093.257018414" observedRunningTime="2025-12-01 20:22:54.378679752 +0000 UTC m=+1094.305761169" watchObservedRunningTime="2025-12-01 20:22:54.389215055 +0000 UTC m=+1094.316296472" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.569972 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f8dc44d89-b8cwd"] Dec 01 20:22:54 crc kubenswrapper[4852]: E1201 20:22:54.570527 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea8042f-95a3-43d9-a653-6c61fc239d8e" containerName="neutron-db-sync" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.570541 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea8042f-95a3-43d9-a653-6c61fc239d8e" containerName="neutron-db-sync" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.570756 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ea8042f-95a3-43d9-a653-6c61fc239d8e" containerName="neutron-db-sync" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.571834 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.582930 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f8dc44d89-b8cwd"] Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.664091 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5cd4765d7b-hrwph"] Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.667362 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.675501 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nnk52" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.675804 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.675919 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.675963 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.678035 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-config\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.678274 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-swift-storage-0\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.678348 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njlpd\" (UniqueName: \"kubernetes.io/projected/c1747b3e-a948-4dda-9ab3-403d5e33cee4-kube-api-access-njlpd\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.678394 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-svc\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.678606 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-nb\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.678631 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-sb\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.703681 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.714604 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5cd4765d7b-hrwph"] Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.785878 4852 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trl4r\" (UniqueName: \"kubernetes.io/projected/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-kube-api-access-trl4r\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.785934 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-ovndb-tls-certs\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.786008 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-sb\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.786027 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-nb\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.786070 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-config\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.786096 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-config\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.786123 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-httpd-config\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.786200 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-combined-ca-bundle\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.786233 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-swift-storage-0\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.786269 4852 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njlpd\" (UniqueName: \"kubernetes.io/projected/c1747b3e-a948-4dda-9ab3-403d5e33cee4-kube-api-access-njlpd\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.786374 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-svc\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.787251 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-sb\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.787370 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-nb\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.787515 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-swift-storage-0\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.787931 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-svc\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.789520 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-config\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.820785 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njlpd\" (UniqueName: \"kubernetes.io/projected/c1747b3e-a948-4dda-9ab3-403d5e33cee4-kube-api-access-njlpd\") pod \"dnsmasq-dns-f8dc44d89-b8cwd\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.889746 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trl4r\" (UniqueName: \"kubernetes.io/projected/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-kube-api-access-trl4r\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.889827 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-ovndb-tls-certs\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.889901 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-config\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.889933 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-httpd-config\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.890036 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-combined-ca-bundle\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.895403 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-combined-ca-bundle\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.896613 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-config\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.896918 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-ovndb-tls-certs\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.897684 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-httpd-config\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.921123 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trl4r\" (UniqueName: \"kubernetes.io/projected/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-kube-api-access-trl4r\") pod \"neutron-5cd4765d7b-hrwph\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:54 crc kubenswrapper[4852]: I1201 20:22:54.995420 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.220770 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.426837 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3d67c2f4-a127-4a7f-bb01-15543416188f","Type":"ContainerStarted","Data":"53f1ad250140f7a408ffbae9efff4621c0fb798e1b424e263b82766eb722e4b6"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.430784 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fh7zl" event={"ID":"54be94f0-c30e-4a21-9a22-a055b5e6154f","Type":"ContainerStarted","Data":"0832d29e188960d4ec25f7544e39bea226190242c52c6d164b84ef4bf9af1895"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.455383 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-768f876787-6snnf" event={"ID":"c0d059ae-91d8-40a7-862b-7204dfc0b420","Type":"ContainerStarted","Data":"5d2857c8b031fdea53b6b5866e39c037c604cccb04aa3788684953fa00c5dccf"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.455463 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-768f876787-6snnf" podUID="c0d059ae-91d8-40a7-862b-7204dfc0b420" containerName="horizon-log" containerID="cri-o://49c0bc829ec33a751d6ae77a82a9bf7cecdc0e6f8422bf140aa462f06e420073" gracePeriod=30 Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.455548 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-768f876787-6snnf" event={"ID":"c0d059ae-91d8-40a7-862b-7204dfc0b420","Type":"ContainerStarted","Data":"49c0bc829ec33a751d6ae77a82a9bf7cecdc0e6f8422bf140aa462f06e420073"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.455660 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-768f876787-6snnf" podUID="c0d059ae-91d8-40a7-862b-7204dfc0b420" containerName="horizon" containerID="cri-o://5d2857c8b031fdea53b6b5866e39c037c604cccb04aa3788684953fa00c5dccf" gracePeriod=30 Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.461734 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.472803 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d7844c8bb-bfdj8" event={"ID":"734ce01c-357e-438b-bfe6-39fa2044dc13","Type":"ContainerStarted","Data":"65ae73baf982279b1029ae39d9022724da41a50922efb382d9bb761c1c4b1753"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.473046 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d7844c8bb-bfdj8" event={"ID":"734ce01c-357e-438b-bfe6-39fa2044dc13","Type":"ContainerStarted","Data":"93647e0225eee6336edcf0baca3971246c9732cc76340cbfb6ab71e6e1227f4e"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.481704 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa21b178-0e31-4c64-a0d7-59e622aa958e","Type":"ContainerStarted","Data":"0fda5311e5eb48480c323d4b3199e1648266ed56df1a435a9b4ca4fb07737ffb"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.483645 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-768f876787-6snnf" podStartSLOduration=3.117480697 podStartE2EDuration="28.483631474s" podCreationTimestamp="2025-12-01 20:22:27 +0000 UTC" firstStartedPulling="2025-12-01 20:22:28.529962164 +0000 UTC m=+1068.457043581" lastFinishedPulling="2025-12-01 20:22:53.896112921 +0000 UTC 
m=+1093.823194358" observedRunningTime="2025-12-01 20:22:55.482345244 +0000 UTC m=+1095.409426651" watchObservedRunningTime="2025-12-01 20:22:55.483631474 +0000 UTC m=+1095.410712891" Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.495620 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6lqj8" event={"ID":"7feb09eb-34d4-4b20-9904-bcde9ec4a9db","Type":"ContainerStarted","Data":"614f6ee7e96a76d16418a17bed3b88c340405bc2f9c9064b22252542bce3a355"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.495678 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6lqj8" event={"ID":"7feb09eb-34d4-4b20-9904-bcde9ec4a9db","Type":"ContainerStarted","Data":"b9c094817aea440017ccb88e2f320c67933379c6d146131ec406d56c1834f417"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.511438 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bd8d6ff69-cmc78" event={"ID":"8cdd0646-f58e-47b0-b562-1db787dd489d","Type":"ContainerStarted","Data":"61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.511913 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6bd8d6ff69-cmc78" podUID="8cdd0646-f58e-47b0-b562-1db787dd489d" containerName="horizon-log" containerID="cri-o://ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50" gracePeriod=30 Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.512092 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6bd8d6ff69-cmc78" podUID="8cdd0646-f58e-47b0-b562-1db787dd489d" containerName="horizon" containerID="cri-o://61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff" gracePeriod=30 Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.519975 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-d7844c8bb-bfdj8" podStartSLOduration=22.519955822 podStartE2EDuration="22.519955822s" podCreationTimestamp="2025-12-01 20:22:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:55.514939064 +0000 UTC m=+1095.442020481" watchObservedRunningTime="2025-12-01 20:22:55.519955822 +0000 UTC m=+1095.447037239" Dec 01 20:22:55 crc kubenswrapper[4852]: W1201 20:22:55.526721 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1d8a3fc_8cb0_496d_a22d_c590235f0a1d.slice/crio-5ae3dc79a4fa06701426e27d69f588ba1083189a3716ab001ee9416f5d89afc2 WatchSource:0}: Error finding container 5ae3dc79a4fa06701426e27d69f588ba1083189a3716ab001ee9416f5d89afc2: Status 404 returned error can't find the container with id 5ae3dc79a4fa06701426e27d69f588ba1083189a3716ab001ee9416f5d89afc2 Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.537848 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd6f888-xxwbg" event={"ID":"160a77b2-5ec6-4223-b939-8e90b339f530","Type":"ContainerStarted","Data":"27e3efa80e3c94ec8cdc9c0e1f8271e1591f601a868b309b2e0dd1d88307669d"} Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.537909 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfd6f888-xxwbg" event={"ID":"160a77b2-5ec6-4223-b939-8e90b339f530","Type":"ContainerStarted","Data":"bbaaa82dc098b6f8e3227d444c2018580bc43efab7ea81a3a09df6fc07200ae3"} Dec 
01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.548009 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6bd8d6ff69-cmc78" podStartSLOduration=5.609650682 podStartE2EDuration="32.547972878s" podCreationTimestamp="2025-12-01 20:22:23 +0000 UTC" firstStartedPulling="2025-12-01 20:22:26.393560503 +0000 UTC m=+1066.320641920" lastFinishedPulling="2025-12-01 20:22:53.331882689 +0000 UTC m=+1093.258964116" observedRunningTime="2025-12-01 20:22:55.537567629 +0000 UTC m=+1095.464649046" watchObservedRunningTime="2025-12-01 20:22:55.547972878 +0000 UTC m=+1095.475054295" Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.572359 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-6lqj8" podStartSLOduration=10.572333588 podStartE2EDuration="10.572333588s" podCreationTimestamp="2025-12-01 20:22:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:55.562106474 +0000 UTC m=+1095.489187891" watchObservedRunningTime="2025-12-01 20:22:55.572333588 +0000 UTC m=+1095.499415005" Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.768574 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-dfd6f888-xxwbg" podStartSLOduration=21.768550919 podStartE2EDuration="21.768550919s" podCreationTimestamp="2025-12-01 20:22:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:55.631307132 +0000 UTC m=+1095.558388549" watchObservedRunningTime="2025-12-01 20:22:55.768550919 +0000 UTC m=+1095.695632336" Dec 01 20:22:55 crc kubenswrapper[4852]: I1201 20:22:55.781521 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f8dc44d89-b8cwd"] Dec 01 20:22:56 crc kubenswrapper[4852]: I1201 20:22:56.135580 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5cd4765d7b-hrwph"] Dec 01 20:22:56 crc kubenswrapper[4852]: W1201 20:22:56.138224 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9f6adbd_d5d8_4057_84ad_fd835e718a8e.slice/crio-23dcbb4b9feffeadc51bdd80c9fab04af1037ec426b56d8cd703cebafc1dae0f WatchSource:0}: Error finding container 23dcbb4b9feffeadc51bdd80c9fab04af1037ec426b56d8cd703cebafc1dae0f: Status 404 returned error can't find the container with id 23dcbb4b9feffeadc51bdd80c9fab04af1037ec426b56d8cd703cebafc1dae0f Dec 01 20:22:56 crc kubenswrapper[4852]: I1201 20:22:56.587766 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d","Type":"ContainerStarted","Data":"5ae3dc79a4fa06701426e27d69f588ba1083189a3716ab001ee9416f5d89afc2"} Dec 01 20:22:56 crc kubenswrapper[4852]: I1201 20:22:56.601368 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3d67c2f4-a127-4a7f-bb01-15543416188f","Type":"ContainerStarted","Data":"d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7"} Dec 01 20:22:56 crc kubenswrapper[4852]: I1201 20:22:56.605392 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd4765d7b-hrwph" 
event={"ID":"f9f6adbd-d5d8-4057-84ad-fd835e718a8e","Type":"ContainerStarted","Data":"f2578a68a9ff23eee774dceb2db68c13807f48a0c27ecea4f39011272f7755f2"} Dec 01 20:22:56 crc kubenswrapper[4852]: I1201 20:22:56.605425 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd4765d7b-hrwph" event={"ID":"f9f6adbd-d5d8-4057-84ad-fd835e718a8e","Type":"ContainerStarted","Data":"23dcbb4b9feffeadc51bdd80c9fab04af1037ec426b56d8cd703cebafc1dae0f"} Dec 01 20:22:56 crc kubenswrapper[4852]: I1201 20:22:56.611532 4852 generic.go:334] "Generic (PLEG): container finished" podID="c1747b3e-a948-4dda-9ab3-403d5e33cee4" containerID="b0648aad07c004b472b19d9d93114a64831a810dbe2cc80cd9047cf7579f0dcd" exitCode=0 Dec 01 20:22:56 crc kubenswrapper[4852]: I1201 20:22:56.611677 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" event={"ID":"c1747b3e-a948-4dda-9ab3-403d5e33cee4","Type":"ContainerDied","Data":"b0648aad07c004b472b19d9d93114a64831a810dbe2cc80cd9047cf7579f0dcd"} Dec 01 20:22:56 crc kubenswrapper[4852]: I1201 20:22:56.611759 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" event={"ID":"c1747b3e-a948-4dda-9ab3-403d5e33cee4","Type":"ContainerStarted","Data":"4cda01031081d89598cd11126089e72dae823e7ccf0a2ba23bd265e6738207d8"} Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.520305 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-768f876787-6snnf" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.647466 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd4765d7b-hrwph" event={"ID":"f9f6adbd-d5d8-4057-84ad-fd835e718a8e","Type":"ContainerStarted","Data":"2a97efa173c06e0793777a5c5b4ab0b3077929e081edd4902bf2777a6f2553cf"} Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.649533 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.663945 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" event={"ID":"c1747b3e-a948-4dda-9ab3-403d5e33cee4","Type":"ContainerStarted","Data":"585ef0f958916db0b6897aea839553c6018c4cb4aec6f533caf661087ff6d8f2"} Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.664200 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.677472 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5cd4765d7b-hrwph" podStartSLOduration=3.67742787 podStartE2EDuration="3.67742787s" podCreationTimestamp="2025-12-01 20:22:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:57.675825598 +0000 UTC m=+1097.602907005" watchObservedRunningTime="2025-12-01 20:22:57.67742787 +0000 UTC m=+1097.604509287" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.680070 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d","Type":"ContainerStarted","Data":"af740d1b552f0b3e423e9e9856e527cdaa85ef20f2cd30f0c7f236f3fd0c1961"} Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.684627 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"3d67c2f4-a127-4a7f-bb01-15543416188f","Type":"ContainerStarted","Data":"d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b"} Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.710242 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" podStartSLOduration=3.710216376 podStartE2EDuration="3.710216376s" podCreationTimestamp="2025-12-01 20:22:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:57.702888304 +0000 UTC m=+1097.629969721" watchObservedRunningTime="2025-12-01 20:22:57.710216376 +0000 UTC m=+1097.637297793" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.731168 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=12.731146217 podStartE2EDuration="12.731146217s" podCreationTimestamp="2025-12-01 20:22:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:57.730302321 +0000 UTC m=+1097.657383738" watchObservedRunningTime="2025-12-01 20:22:57.731146217 +0000 UTC m=+1097.658227634" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.818329 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-55686cd79f-5wjtq"] Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.821061 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.835560 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.835782 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.846579 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55686cd79f-5wjtq"] Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.907575 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-ovndb-tls-certs\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.907649 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-config\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.907693 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-public-tls-certs\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.907725 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-internal-tls-certs\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.907771 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-httpd-config\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.907811 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlwr8\" (UniqueName: \"kubernetes.io/projected/fc88aba6-cf67-4609-9a04-797090fcce15-kube-api-access-vlwr8\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:57 crc kubenswrapper[4852]: I1201 20:22:57.907835 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-combined-ca-bundle\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.009544 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-public-tls-certs\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.009631 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-internal-tls-certs\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.009690 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-httpd-config\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.009735 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlwr8\" (UniqueName: \"kubernetes.io/projected/fc88aba6-cf67-4609-9a04-797090fcce15-kube-api-access-vlwr8\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.009757 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-combined-ca-bundle\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.009829 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-ovndb-tls-certs\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.009879 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-config\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.021678 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-combined-ca-bundle\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.022235 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-config\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.022536 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-ovndb-tls-certs\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.030473 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-internal-tls-certs\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.030805 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-public-tls-certs\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.041395 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fc88aba6-cf67-4609-9a04-797090fcce15-httpd-config\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.049060 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlwr8\" (UniqueName: \"kubernetes.io/projected/fc88aba6-cf67-4609-9a04-797090fcce15-kube-api-access-vlwr8\") pod \"neutron-55686cd79f-5wjtq\" (UID: \"fc88aba6-cf67-4609-9a04-797090fcce15\") " pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.200118 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.721348 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d","Type":"ContainerStarted","Data":"2f92d9f708bb46fd47a913597b9c5d38fe1e6a397e9b3299a15733b35526b617"} Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.734908 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9mp8s" event={"ID":"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58","Type":"ContainerStarted","Data":"dfe745f60839d0ae1dbae3babaae2d3a9388616ba7ba747d914d899ca4204850"} Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.764701 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.764670321 podStartE2EDuration="5.764670321s" podCreationTimestamp="2025-12-01 20:22:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:22:58.749991268 +0000 UTC m=+1098.677072685" watchObservedRunningTime="2025-12-01 20:22:58.764670321 +0000 UTC m=+1098.691751738" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.772206 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-9mp8s" podStartSLOduration=4.120962688 podStartE2EDuration="34.772175389s" podCreationTimestamp="2025-12-01 20:22:24 +0000 UTC" firstStartedPulling="2025-12-01 20:22:26.414438153 +0000 UTC m=+1066.341519570" lastFinishedPulling="2025-12-01 20:22:57.065650864 +0000 UTC m=+1096.992732271" observedRunningTime="2025-12-01 20:22:58.768907646 +0000 UTC m=+1098.695989063" watchObservedRunningTime="2025-12-01 20:22:58.772175389 +0000 UTC m=+1098.699256806" Dec 01 20:22:58 crc kubenswrapper[4852]: I1201 20:22:58.940936 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55686cd79f-5wjtq"] Dec 01 20:22:59 crc kubenswrapper[4852]: I1201 20:22:59.756244 4852 generic.go:334] "Generic (PLEG): container finished" podID="54be94f0-c30e-4a21-9a22-a055b5e6154f" containerID="0832d29e188960d4ec25f7544e39bea226190242c52c6d164b84ef4bf9af1895" exitCode=0 Dec 01 20:22:59 crc kubenswrapper[4852]: I1201 20:22:59.756902 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fh7zl" event={"ID":"54be94f0-c30e-4a21-9a22-a055b5e6154f","Type":"ContainerDied","Data":"0832d29e188960d4ec25f7544e39bea226190242c52c6d164b84ef4bf9af1895"} Dec 01 20:22:59 crc kubenswrapper[4852]: I1201 20:22:59.762430 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55686cd79f-5wjtq" event={"ID":"fc88aba6-cf67-4609-9a04-797090fcce15","Type":"ContainerStarted","Data":"8d11742511717ffec3ff818755408da3f64b4f3d95188a7d4399feed47342b85"} Dec 01 20:23:00 crc kubenswrapper[4852]: I1201 20:23:00.779716 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55686cd79f-5wjtq" event={"ID":"fc88aba6-cf67-4609-9a04-797090fcce15","Type":"ContainerStarted","Data":"190ca9d4b5780bba0f2a01b650de33754c3edfbcb8ad62cd44b3540794fe3db3"} Dec 01 20:23:00 crc kubenswrapper[4852]: I1201 20:23:00.780235 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:23:00 crc kubenswrapper[4852]: I1201 20:23:00.780254 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-55686cd79f-5wjtq" event={"ID":"fc88aba6-cf67-4609-9a04-797090fcce15","Type":"ContainerStarted","Data":"92fab0d2e13ab63c13b8eff6a151fcf704421a6df74604cbcdb9b45c77266725"} Dec 01 20:23:00 crc kubenswrapper[4852]: I1201 20:23:00.810888 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-55686cd79f-5wjtq" podStartSLOduration=3.810868691 podStartE2EDuration="3.810868691s" podCreationTimestamp="2025-12-01 20:22:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:00.804961304 +0000 UTC m=+1100.732042721" watchObservedRunningTime="2025-12-01 20:23:00.810868691 +0000 UTC m=+1100.737950108" Dec 01 20:23:02 crc kubenswrapper[4852]: I1201 20:23:02.806322 4852 generic.go:334] "Generic (PLEG): container finished" podID="7feb09eb-34d4-4b20-9904-bcde9ec4a9db" containerID="614f6ee7e96a76d16418a17bed3b88c340405bc2f9c9064b22252542bce3a355" exitCode=0 Dec 01 20:23:02 crc kubenswrapper[4852]: I1201 20:23:02.806361 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6lqj8" event={"ID":"7feb09eb-34d4-4b20-9904-bcde9ec4a9db","Type":"ContainerDied","Data":"614f6ee7e96a76d16418a17bed3b88c340405bc2f9c9064b22252542bce3a355"} Dec 01 20:23:03 crc kubenswrapper[4852]: I1201 20:23:03.796341 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 20:23:03 crc kubenswrapper[4852]: I1201 20:23:03.796737 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 20:23:03 crc kubenswrapper[4852]: I1201 20:23:03.821112 4852 generic.go:334] "Generic (PLEG): container finished" podID="622dd636-8b05-4a9c-aa97-0fb5fd5d0c58" containerID="dfe745f60839d0ae1dbae3babaae2d3a9388616ba7ba747d914d899ca4204850" exitCode=0 Dec 01 20:23:03 crc kubenswrapper[4852]: I1201 20:23:03.821195 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9mp8s" event={"ID":"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58","Type":"ContainerDied","Data":"dfe745f60839d0ae1dbae3babaae2d3a9388616ba7ba747d914d899ca4204850"} Dec 01 20:23:03 crc kubenswrapper[4852]: I1201 20:23:03.858577 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 20:23:03 crc kubenswrapper[4852]: I1201 20:23:03.859350 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.242226 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.242783 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.368473 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.368527 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.601799 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 
20:23:04.801538 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fh7zl" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.827904 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.855682 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6lqj8" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.855789 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6lqj8" event={"ID":"7feb09eb-34d4-4b20-9904-bcde9ec4a9db","Type":"ContainerDied","Data":"b9c094817aea440017ccb88e2f320c67933379c6d146131ec406d56c1834f417"} Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.855836 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9c094817aea440017ccb88e2f320c67933379c6d146131ec406d56c1834f417" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.862284 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fh7zl" event={"ID":"54be94f0-c30e-4a21-9a22-a055b5e6154f","Type":"ContainerDied","Data":"8d6a5854318e1a8d581073faa7d96b16f773d4a29f4c2386e8e3b868947c3d7b"} Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.862339 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d6a5854318e1a8d581073faa7d96b16f773d4a29f4c2386e8e3b868947c3d7b" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.863041 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fh7zl" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.863080 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.863182 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.970514 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-combined-ca-bundle\") pod \"54be94f0-c30e-4a21-9a22-a055b5e6154f\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.970569 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-config-data\") pod \"54be94f0-c30e-4a21-9a22-a055b5e6154f\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") " Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.970609 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-credential-keys\") pod \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.970739 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-config-data\") pod \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") " Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 
20:23:04.970768 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfqtb\" (UniqueName: \"kubernetes.io/projected/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-kube-api-access-dfqtb\") pod \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") "
Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.970807 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-scripts\") pod \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") "
Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.970860 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-fernet-keys\") pod \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") "
Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.970888 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-combined-ca-bundle\") pod \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\" (UID: \"7feb09eb-34d4-4b20-9904-bcde9ec4a9db\") "
Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.970918 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54be94f0-c30e-4a21-9a22-a055b5e6154f-logs\") pod \"54be94f0-c30e-4a21-9a22-a055b5e6154f\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") "
Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.970949 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-scripts\") pod \"54be94f0-c30e-4a21-9a22-a055b5e6154f\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") "
Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.970993 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdsc9\" (UniqueName: \"kubernetes.io/projected/54be94f0-c30e-4a21-9a22-a055b5e6154f-kube-api-access-sdsc9\") pod \"54be94f0-c30e-4a21-9a22-a055b5e6154f\" (UID: \"54be94f0-c30e-4a21-9a22-a055b5e6154f\") "
Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.975690 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54be94f0-c30e-4a21-9a22-a055b5e6154f-logs" (OuterVolumeSpecName: "logs") pod "54be94f0-c30e-4a21-9a22-a055b5e6154f" (UID: "54be94f0-c30e-4a21-9a22-a055b5e6154f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.983591 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-scripts" (OuterVolumeSpecName: "scripts") pod "54be94f0-c30e-4a21-9a22-a055b5e6154f" (UID: "54be94f0-c30e-4a21-9a22-a055b5e6154f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:23:04 crc kubenswrapper[4852]: I1201 20:23:04.986568 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-scripts" (OuterVolumeSpecName: "scripts") pod "7feb09eb-34d4-4b20-9904-bcde9ec4a9db" (UID: "7feb09eb-34d4-4b20-9904-bcde9ec4a9db"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:04.998641 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54be94f0-c30e-4a21-9a22-a055b5e6154f-kube-api-access-sdsc9" (OuterVolumeSpecName: "kube-api-access-sdsc9") pod "54be94f0-c30e-4a21-9a22-a055b5e6154f" (UID: "54be94f0-c30e-4a21-9a22-a055b5e6154f"). InnerVolumeSpecName "kube-api-access-sdsc9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:04.998668 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7feb09eb-34d4-4b20-9904-bcde9ec4a9db" (UID: "7feb09eb-34d4-4b20-9904-bcde9ec4a9db"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:04.999261 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.007168 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7feb09eb-34d4-4b20-9904-bcde9ec4a9db" (UID: "7feb09eb-34d4-4b20-9904-bcde9ec4a9db"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.018764 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-kube-api-access-dfqtb" (OuterVolumeSpecName: "kube-api-access-dfqtb") pod "7feb09eb-34d4-4b20-9904-bcde9ec4a9db" (UID: "7feb09eb-34d4-4b20-9904-bcde9ec4a9db"). InnerVolumeSpecName "kube-api-access-dfqtb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.068290 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-config-data" (OuterVolumeSpecName: "config-data") pod "54be94f0-c30e-4a21-9a22-a055b5e6154f" (UID: "54be94f0-c30e-4a21-9a22-a055b5e6154f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.073644 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.074050 4852 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.074078 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfqtb\" (UniqueName: \"kubernetes.io/projected/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-kube-api-access-dfqtb\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.074090 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.074101 4852 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.074125 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54be94f0-c30e-4a21-9a22-a055b5e6154f-logs\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.074136 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.074150 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdsc9\" (UniqueName: \"kubernetes.io/projected/54be94f0-c30e-4a21-9a22-a055b5e6154f-kube-api-access-sdsc9\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.083852 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54be94f0-c30e-4a21-9a22-a055b5e6154f" (UID: "54be94f0-c30e-4a21-9a22-a055b5e6154f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.098729 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-config-data" (OuterVolumeSpecName: "config-data") pod "7feb09eb-34d4-4b20-9904-bcde9ec4a9db" (UID: "7feb09eb-34d4-4b20-9904-bcde9ec4a9db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.102318 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7feb09eb-34d4-4b20-9904-bcde9ec4a9db" (UID: "7feb09eb-34d4-4b20-9904-bcde9ec4a9db"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.226959 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54be94f0-c30e-4a21-9a22-a055b5e6154f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.234835 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.235035 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7feb09eb-34d4-4b20-9904-bcde9ec4a9db-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.296288 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f5d458b55-7d2nd"]
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.303514 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" podUID="553feaf1-d70f-46b8-89db-a7a141377b01" containerName="dnsmasq-dns" containerID="cri-o://1966432c165e2d879fa973f536e9b94fe0f1fa19b654677eccc855f2b3d20c3a" gracePeriod=10
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.323570 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9mp8s"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.347372 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-db-sync-config-data\") pod \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") "
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.347527 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g86nr\" (UniqueName: \"kubernetes.io/projected/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-kube-api-access-g86nr\") pod \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") "
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.347665 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-combined-ca-bundle\") pod \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\" (UID: \"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58\") "
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.355039 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-kube-api-access-g86nr" (OuterVolumeSpecName: "kube-api-access-g86nr") pod "622dd636-8b05-4a9c-aa97-0fb5fd5d0c58" (UID: "622dd636-8b05-4a9c-aa97-0fb5fd5d0c58"). InnerVolumeSpecName "kube-api-access-g86nr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.393659 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "622dd636-8b05-4a9c-aa97-0fb5fd5d0c58" (UID: "622dd636-8b05-4a9c-aa97-0fb5fd5d0c58"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.412977 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "622dd636-8b05-4a9c-aa97-0fb5fd5d0c58" (UID: "622dd636-8b05-4a9c-aa97-0fb5fd5d0c58"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.453574 4852 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.454067 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g86nr\" (UniqueName: \"kubernetes.io/projected/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-kube-api-access-g86nr\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.454079 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.696277 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.696334 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.750218 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.764054 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.887480 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa21b178-0e31-4c64-a0d7-59e622aa958e","Type":"ContainerStarted","Data":"04badb7a966d4ff03a4b686961f518ba22e8ea9c194801038b413442511cae89"}
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.902716 4852 generic.go:334] "Generic (PLEG): container finished" podID="553feaf1-d70f-46b8-89db-a7a141377b01" containerID="1966432c165e2d879fa973f536e9b94fe0f1fa19b654677eccc855f2b3d20c3a" exitCode=0
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.902784 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" event={"ID":"553feaf1-d70f-46b8-89db-a7a141377b01","Type":"ContainerDied","Data":"1966432c165e2d879fa973f536e9b94fe0f1fa19b654677eccc855f2b3d20c3a"}
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.902815 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" event={"ID":"553feaf1-d70f-46b8-89db-a7a141377b01","Type":"ContainerDied","Data":"e66480bc68d019161982ab58f4ebf340f6ddd216b7dd36cb5d95ea51df53ed33"}
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.902830 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e66480bc68d019161982ab58f4ebf340f6ddd216b7dd36cb5d95ea51df53ed33"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.915791 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9mp8s"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.918613 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9mp8s" event={"ID":"622dd636-8b05-4a9c-aa97-0fb5fd5d0c58","Type":"ContainerDied","Data":"5695a82c9f465ed3f5c6ec4a49cae3e05a45f1db5687e8d26e19e3f986371608"}
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.918665 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5695a82c9f465ed3f5c6ec4a49cae3e05a45f1db5687e8d26e19e3f986371608"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.920225 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.920244 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 01 20:23:05 crc kubenswrapper[4852]: I1201 20:23:05.971613 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.051763 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-fd7b4cb9d-8zvhn"]
Dec 01 20:23:06 crc kubenswrapper[4852]: E1201 20:23:06.052264 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54be94f0-c30e-4a21-9a22-a055b5e6154f" containerName="placement-db-sync"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.052282 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="54be94f0-c30e-4a21-9a22-a055b5e6154f" containerName="placement-db-sync"
Dec 01 20:23:06 crc kubenswrapper[4852]: E1201 20:23:06.052295 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="553feaf1-d70f-46b8-89db-a7a141377b01" containerName="dnsmasq-dns"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.052303 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="553feaf1-d70f-46b8-89db-a7a141377b01" containerName="dnsmasq-dns"
Dec 01 20:23:06 crc kubenswrapper[4852]: E1201 20:23:06.052346 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7feb09eb-34d4-4b20-9904-bcde9ec4a9db" containerName="keystone-bootstrap"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.052353 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7feb09eb-34d4-4b20-9904-bcde9ec4a9db" containerName="keystone-bootstrap"
Dec 01 20:23:06 crc kubenswrapper[4852]: E1201 20:23:06.052364 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="622dd636-8b05-4a9c-aa97-0fb5fd5d0c58" containerName="barbican-db-sync"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.052372 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="622dd636-8b05-4a9c-aa97-0fb5fd5d0c58" containerName="barbican-db-sync"
Dec 01 20:23:06 crc kubenswrapper[4852]: E1201 20:23:06.052387 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="553feaf1-d70f-46b8-89db-a7a141377b01" containerName="init"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.052392 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="553feaf1-d70f-46b8-89db-a7a141377b01" containerName="init"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.052591 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="54be94f0-c30e-4a21-9a22-a055b5e6154f" containerName="placement-db-sync"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.052604 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="7feb09eb-34d4-4b20-9904-bcde9ec4a9db" containerName="keystone-bootstrap"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.052614 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="553feaf1-d70f-46b8-89db-a7a141377b01" containerName="dnsmasq-dns"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.052635 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="622dd636-8b05-4a9c-aa97-0fb5fd5d0c58" containerName="barbican-db-sync"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.053811 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.060517 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rcdz5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.060838 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.061680 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.061733 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.062033 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.070718 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-574c7f8dfc-6k2xn"]
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.071235 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpbqd\" (UniqueName: \"kubernetes.io/projected/553feaf1-d70f-46b8-89db-a7a141377b01-kube-api-access-qpbqd\") pod \"553feaf1-d70f-46b8-89db-a7a141377b01\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") "
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.071306 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-config\") pod \"553feaf1-d70f-46b8-89db-a7a141377b01\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") "
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.071426 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-svc\") pod \"553feaf1-d70f-46b8-89db-a7a141377b01\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") "
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.071534 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-swift-storage-0\") pod \"553feaf1-d70f-46b8-89db-a7a141377b01\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") "
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.071709 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-sb\") pod \"553feaf1-d70f-46b8-89db-a7a141377b01\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") "
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.071738 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-nb\") pod \"553feaf1-d70f-46b8-89db-a7a141377b01\" (UID: \"553feaf1-d70f-46b8-89db-a7a141377b01\") "
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.072737 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.076528 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.076822 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.077128 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.077413 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-tqglv"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.077781 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.077916 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.103441 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-574c7f8dfc-6k2xn"]
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.116873 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/553feaf1-d70f-46b8-89db-a7a141377b01-kube-api-access-qpbqd" (OuterVolumeSpecName: "kube-api-access-qpbqd") pod "553feaf1-d70f-46b8-89db-a7a141377b01" (UID: "553feaf1-d70f-46b8-89db-a7a141377b01"). InnerVolumeSpecName "kube-api-access-qpbqd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.138612 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-fd7b4cb9d-8zvhn"]
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175347 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj4r6\" (UniqueName: \"kubernetes.io/projected/d5fbd8b0-59fb-402e-8442-0302ea125e49-kube-api-access-vj4r6\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175400 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-public-tls-certs\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175466 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-combined-ca-bundle\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175499 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-internal-tls-certs\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175527 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-scripts\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175547 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-credential-keys\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175564 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-combined-ca-bundle\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175620 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nwgz\" (UniqueName: \"kubernetes.io/projected/e12728e7-2002-493c-ad13-3bbb68e8ecf7-kube-api-access-4nwgz\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175641 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-internal-tls-certs\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175670 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-config-data\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175720 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-public-tls-certs\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175744 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-scripts\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175783 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-fernet-keys\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175801 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-config-data\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175826 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e12728e7-2002-493c-ad13-3bbb68e8ecf7-logs\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.175873 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpbqd\" (UniqueName: \"kubernetes.io/projected/553feaf1-d70f-46b8-89db-a7a141377b01-kube-api-access-qpbqd\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.253277 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5dcb96d78f-vvtc5"]
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288094 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-fernet-keys\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288181 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-config-data\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288226 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e12728e7-2002-493c-ad13-3bbb68e8ecf7-logs\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288254 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj4r6\" (UniqueName: \"kubernetes.io/projected/d5fbd8b0-59fb-402e-8442-0302ea125e49-kube-api-access-vj4r6\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288274 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-public-tls-certs\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288323 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-combined-ca-bundle\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288364 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-internal-tls-certs\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288398 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-scripts\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288428 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-credential-keys\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288470 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-combined-ca-bundle\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288548 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-internal-tls-certs\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288573 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nwgz\" (UniqueName: \"kubernetes.io/projected/e12728e7-2002-493c-ad13-3bbb68e8ecf7-kube-api-access-4nwgz\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288613 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-config-data\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288665 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-public-tls-certs\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.288689 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-scripts\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.290675 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.305628 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ddn7d"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.305928 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.306567 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.318819 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "553feaf1-d70f-46b8-89db-a7a141377b01" (UID: "553feaf1-d70f-46b8-89db-a7a141377b01"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.320577 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e12728e7-2002-493c-ad13-3bbb68e8ecf7-logs\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.327277 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-config" (OuterVolumeSpecName: "config") pod "553feaf1-d70f-46b8-89db-a7a141377b01" (UID: "553feaf1-d70f-46b8-89db-a7a141377b01"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.327357 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"]
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.341679 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.350158 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.350249 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "553feaf1-d70f-46b8-89db-a7a141377b01" (UID: "553feaf1-d70f-46b8-89db-a7a141377b01"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.352443 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-public-tls-certs\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.353111 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-fernet-keys\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.356090 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-internal-tls-certs\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.361600 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-credential-keys\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.366892 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-config-data\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.368277 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-public-tls-certs\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.371837 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-scripts\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.373752 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nwgz\" (UniqueName: \"kubernetes.io/projected/e12728e7-2002-493c-ad13-3bbb68e8ecf7-kube-api-access-4nwgz\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.375875 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-combined-ca-bundle\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.377022 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-internal-tls-certs\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.378279 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-combined-ca-bundle\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.381107 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj4r6\" (UniqueName: \"kubernetes.io/projected/d5fbd8b0-59fb-402e-8442-0302ea125e49-kube-api-access-vj4r6\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.381339 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e12728e7-2002-493c-ad13-3bbb68e8ecf7-config-data\") pod \"placement-fd7b4cb9d-8zvhn\" (UID: \"e12728e7-2002-493c-ad13-3bbb68e8ecf7\") " pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.398395 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-fd7b4cb9d-8zvhn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.398982 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ec762a2-a422-40fd-91a8-fdaf58be343c-combined-ca-bundle\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.399108 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ec762a2-a422-40fd-91a8-fdaf58be343c-config-data\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.399496 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ec762a2-a422-40fd-91a8-fdaf58be343c-config-data-custom\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.399586 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ec762a2-a422-40fd-91a8-fdaf58be343c-logs\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.399621 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ncmn\" (UniqueName: \"kubernetes.io/projected/6ec762a2-a422-40fd-91a8-fdaf58be343c-kube-api-access-8ncmn\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.399911 4852 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.399932 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.399944 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-config\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.418717 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5dcb96d78f-vvtc5"]
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.418758 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"]
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.423956 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5fbd8b0-59fb-402e-8442-0302ea125e49-scripts\") pod \"keystone-574c7f8dfc-6k2xn\" (UID: \"d5fbd8b0-59fb-402e-8442-0302ea125e49\") " pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.443694 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "553feaf1-d70f-46b8-89db-a7a141377b01" (UID: "553feaf1-d70f-46b8-89db-a7a141377b01"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.443822 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cf96b7dc5-2q6zp"]
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.447337 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.460877 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "553feaf1-d70f-46b8-89db-a7a141377b01" (UID: "553feaf1-d70f-46b8-89db-a7a141377b01"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.478862 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf96b7dc5-2q6zp"]
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.502524 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6030470-6a0e-43fc-ae0c-755a3d4a9980-config-data-custom\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.502693 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ec762a2-a422-40fd-91a8-fdaf58be343c-config-data\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.502735 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ec762a2-a422-40fd-91a8-fdaf58be343c-config-data-custom\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.502772 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6030470-6a0e-43fc-ae0c-755a3d4a9980-combined-ca-bundle\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.502817 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6030470-6a0e-43fc-ae0c-755a3d4a9980-config-data\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.502858 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ec762a2-a422-40fd-91a8-fdaf58be343c-logs\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.502895 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ncmn\" (UniqueName: \"kubernetes.io/projected/6ec762a2-a422-40fd-91a8-fdaf58be343c-kube-api-access-8ncmn\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.502939 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ec762a2-a422-40fd-91a8-fdaf58be343c-combined-ca-bundle\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.502989 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6030470-6a0e-43fc-ae0c-755a3d4a9980-logs\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.503039 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48plp\" (UniqueName: \"kubernetes.io/projected/d6030470-6a0e-43fc-ae0c-755a3d4a9980-kube-api-access-48plp\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.503119 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.503134 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/553feaf1-d70f-46b8-89db-a7a141377b01-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.514484 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-574c7f8dfc-6k2xn"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.515738 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ec762a2-a422-40fd-91a8-fdaf58be343c-logs\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.517140 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ec762a2-a422-40fd-91a8-fdaf58be343c-config-data\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.537906 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ec762a2-a422-40fd-91a8-fdaf58be343c-config-data-custom\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.547900 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ncmn\" (UniqueName: \"kubernetes.io/projected/6ec762a2-a422-40fd-91a8-fdaf58be343c-kube-api-access-8ncmn\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.556263 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ec762a2-a422-40fd-91a8-fdaf58be343c-combined-ca-bundle\") pod \"barbican-worker-5dcb96d78f-vvtc5\" (UID: \"6ec762a2-a422-40fd-91a8-fdaf58be343c\") " pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.606833 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48plp\" (UniqueName: \"kubernetes.io/projected/d6030470-6a0e-43fc-ae0c-755a3d4a9980-kube-api-access-48plp\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.606890 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-nb\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.606916 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6030470-6a0e-43fc-ae0c-755a3d4a9980-config-data-custom\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.606944 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jssd\" (UniqueName: \"kubernetes.io/projected/16b547de-9574-4465-94e3-054b5013ece5-kube-api-access-9jssd\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.606982 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-swift-storage-0\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.607009 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-config\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.607024 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-svc\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.607059 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6030470-6a0e-43fc-ae0c-755a3d4a9980-combined-ca-bundle\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.607083 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6030470-6a0e-43fc-ae0c-755a3d4a9980-config-data\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.607104 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-sb\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.607160 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6030470-6a0e-43fc-ae0c-755a3d4a9980-logs\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.607565 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6030470-6a0e-43fc-ae0c-755a3d4a9980-logs\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.673335 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6030470-6a0e-43fc-ae0c-755a3d4a9980-combined-ca-bundle\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.677730 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6030470-6a0e-43fc-ae0c-755a3d4a9980-config-data-custom\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.678312 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5dcb96d78f-vvtc5"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.689010 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48plp\" (UniqueName: \"kubernetes.io/projected/d6030470-6a0e-43fc-ae0c-755a3d4a9980-kube-api-access-48plp\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.707814 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6030470-6a0e-43fc-ae0c-755a3d4a9980-config-data\") pod \"barbican-keystone-listener-6ffc9cc59d-mbzlh\" (UID: \"d6030470-6a0e-43fc-ae0c-755a3d4a9980\") " pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.710274 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-nb\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.710350 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jssd\" (UniqueName: \"kubernetes.io/projected/16b547de-9574-4465-94e3-054b5013ece5-kube-api-access-9jssd\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.710423 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-swift-storage-0\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.710485 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-config\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.710509 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-svc\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.710580 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-sb\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.713791 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-sb\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.714448 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-config\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.715134 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-svc\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.715948 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-nb\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.718127 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-swift-storage-0\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.724730 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.777489 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jssd\" (UniqueName: \"kubernetes.io/projected/16b547de-9574-4465-94e3-054b5013ece5-kube-api-access-9jssd\") pod \"dnsmasq-dns-cf96b7dc5-2q6zp\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.800721 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp"
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.848414 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-95bdd6c68-zqjgq"]
Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.850743 4852 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.855817 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.876305 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-95bdd6c68-zqjgq"] Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.938935 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa9160bb-32cb-4617-8ecf-3ca078d2008a-logs\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.939028 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.939088 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data-custom\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.939152 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-combined-ca-bundle\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.939188 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dv8f\" (UniqueName: \"kubernetes.io/projected/aa9160bb-32cb-4617-8ecf-3ca078d2008a-kube-api-access-7dv8f\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:06 crc kubenswrapper[4852]: I1201 20:23:06.943781 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f5d458b55-7d2nd" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.010666 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f5d458b55-7d2nd"] Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.034859 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f5d458b55-7d2nd"] Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.043853 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dv8f\" (UniqueName: \"kubernetes.io/projected/aa9160bb-32cb-4617-8ecf-3ca078d2008a-kube-api-access-7dv8f\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.043924 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa9160bb-32cb-4617-8ecf-3ca078d2008a-logs\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.044019 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.044076 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data-custom\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.044135 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-combined-ca-bundle\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.047853 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa9160bb-32cb-4617-8ecf-3ca078d2008a-logs\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.064847 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-combined-ca-bundle\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.065972 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data-custom\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.074028 4852 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.117163 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dv8f\" (UniqueName: \"kubernetes.io/projected/aa9160bb-32cb-4617-8ecf-3ca078d2008a-kube-api-access-7dv8f\") pod \"barbican-api-95bdd6c68-zqjgq\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.194279 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.213283 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-fd7b4cb9d-8zvhn"] Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.398883 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-574c7f8dfc-6k2xn"] Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.778491 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh"] Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.859511 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5dcb96d78f-vvtc5"] Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.916793 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf96b7dc5-2q6zp"] Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.984407 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-fd7b4cb9d-8zvhn" event={"ID":"e12728e7-2002-493c-ad13-3bbb68e8ecf7","Type":"ContainerStarted","Data":"0df7f64b0a0ba9296bf80071711c22d38f9b841d4fb2cc504e75408c7f75dc09"} Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.986391 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" event={"ID":"16b547de-9574-4465-94e3-054b5013ece5","Type":"ContainerStarted","Data":"2f81d434f483e4dfe44b02c279859ef46be9aac301e388961f512fe17c259bd5"} Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.989375 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5dcb96d78f-vvtc5" event={"ID":"6ec762a2-a422-40fd-91a8-fdaf58be343c","Type":"ContainerStarted","Data":"b9b76b4eab1c48c6f3cd3c5ebc0215a95909ae2d84fcbdd5f3f21319e0cec43c"} Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.991509 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh" event={"ID":"d6030470-6a0e-43fc-ae0c-755a3d4a9980","Type":"ContainerStarted","Data":"2811b9fdb19f2bf2503dbc49bcaa71e27bac1222007247920d679b7da410704e"} Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.993914 4852 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.993939 4852 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 20:23:07 crc kubenswrapper[4852]: I1201 20:23:07.993904 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-574c7f8dfc-6k2xn" 
event={"ID":"d5fbd8b0-59fb-402e-8442-0302ea125e49","Type":"ContainerStarted","Data":"e3b060307291301a20c794437798483dcb49f3bcae18ede51ab30b4073a19818"} Dec 01 20:23:08 crc kubenswrapper[4852]: I1201 20:23:08.355978 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="553feaf1-d70f-46b8-89db-a7a141377b01" path="/var/lib/kubelet/pods/553feaf1-d70f-46b8-89db-a7a141377b01/volumes" Dec 01 20:23:08 crc kubenswrapper[4852]: I1201 20:23:08.693286 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-95bdd6c68-zqjgq"] Dec 01 20:23:08 crc kubenswrapper[4852]: W1201 20:23:08.743325 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa9160bb_32cb_4617_8ecf_3ca078d2008a.slice/crio-e3ebebff086ec9031ad9dfdd1bc6fdfbbd16134cbfc584c019be829942365f0e WatchSource:0}: Error finding container e3ebebff086ec9031ad9dfdd1bc6fdfbbd16134cbfc584c019be829942365f0e: Status 404 returned error can't find the container with id e3ebebff086ec9031ad9dfdd1bc6fdfbbd16134cbfc584c019be829942365f0e Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.044143 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-95bdd6c68-zqjgq" event={"ID":"aa9160bb-32cb-4617-8ecf-3ca078d2008a","Type":"ContainerStarted","Data":"e3ebebff086ec9031ad9dfdd1bc6fdfbbd16134cbfc584c019be829942365f0e"} Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.048711 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-574c7f8dfc-6k2xn" event={"ID":"d5fbd8b0-59fb-402e-8442-0302ea125e49","Type":"ContainerStarted","Data":"c0f425409a4868ace3085919e7833097e4d0dd4fbea62c89f8d4e840f61a58f8"} Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.051308 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-574c7f8dfc-6k2xn" Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.065047 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-fd7b4cb9d-8zvhn" event={"ID":"e12728e7-2002-493c-ad13-3bbb68e8ecf7","Type":"ContainerStarted","Data":"9f3856bc55067aba0b0ef5ebe4e4ad97b0d6612bba6994ec898d41412e012d6f"} Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.065094 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-fd7b4cb9d-8zvhn" event={"ID":"e12728e7-2002-493c-ad13-3bbb68e8ecf7","Type":"ContainerStarted","Data":"7b84104db318f04a070cc673d8a24680e363f356aded1c318d8c201af5499dd2"} Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.066280 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-fd7b4cb9d-8zvhn" Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.066920 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-fd7b4cb9d-8zvhn" Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.086139 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-574c7f8dfc-6k2xn" podStartSLOduration=3.086112064 podStartE2EDuration="3.086112064s" podCreationTimestamp="2025-12-01 20:23:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:09.078134003 +0000 UTC m=+1109.005215420" watchObservedRunningTime="2025-12-01 20:23:09.086112064 +0000 UTC m=+1109.013193481" Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.093633 4852 generic.go:334] "Generic (PLEG): 
container finished" podID="16b547de-9574-4465-94e3-054b5013ece5" containerID="acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182" exitCode=0 Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.093699 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" event={"ID":"16b547de-9574-4465-94e3-054b5013ece5","Type":"ContainerDied","Data":"acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182"} Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.161142 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-fd7b4cb9d-8zvhn" podStartSLOduration=3.161106909 podStartE2EDuration="3.161106909s" podCreationTimestamp="2025-12-01 20:23:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:09.12165452 +0000 UTC m=+1109.048735937" watchObservedRunningTime="2025-12-01 20:23:09.161106909 +0000 UTC m=+1109.088188326" Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.323316 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 20:23:09 crc kubenswrapper[4852]: I1201 20:23:09.334801 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.022476 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6b6b9785cb-jncbj"] Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.027987 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.033156 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.033364 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.052465 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6b6b9785cb-jncbj"] Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.116805 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-95bdd6c68-zqjgq" event={"ID":"aa9160bb-32cb-4617-8ecf-3ca078d2008a","Type":"ContainerStarted","Data":"5f5d00484af4034237001a135aadcdfe440886eb64ce1e86eebf37c1c959f96c"} Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.116883 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-95bdd6c68-zqjgq" event={"ID":"aa9160bb-32cb-4617-8ecf-3ca078d2008a","Type":"ContainerStarted","Data":"46abf8cba3f5f8ca1faad6a8c48325ab13918dc745dea9651dfcb773194b1662"} Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.118581 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.118613 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.154792 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66e282d8-f6fc-4c9b-84e2-398efd252579-logs\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: 
\"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.154847 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-public-tls-certs\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.154871 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-internal-tls-certs\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.154905 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-config-data\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.154926 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-config-data-custom\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.154975 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-combined-ca-bundle\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.155003 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbtb8\" (UniqueName: \"kubernetes.io/projected/66e282d8-f6fc-4c9b-84e2-398efd252579-kube-api-access-dbtb8\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.158871 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-95bdd6c68-zqjgq" podStartSLOduration=4.158846259 podStartE2EDuration="4.158846259s" podCreationTimestamp="2025-12-01 20:23:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:10.150301131 +0000 UTC m=+1110.077382558" watchObservedRunningTime="2025-12-01 20:23:10.158846259 +0000 UTC m=+1110.085927676" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.169853 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" event={"ID":"16b547de-9574-4465-94e3-054b5013ece5","Type":"ContainerStarted","Data":"7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487"} Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.170689 4852 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.206652 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" podStartSLOduration=4.206627429 podStartE2EDuration="4.206627429s" podCreationTimestamp="2025-12-01 20:23:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:10.197711369 +0000 UTC m=+1110.124792796" watchObservedRunningTime="2025-12-01 20:23:10.206627429 +0000 UTC m=+1110.133708846" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.257908 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-combined-ca-bundle\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.257990 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbtb8\" (UniqueName: \"kubernetes.io/projected/66e282d8-f6fc-4c9b-84e2-398efd252579-kube-api-access-dbtb8\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.258130 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66e282d8-f6fc-4c9b-84e2-398efd252579-logs\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.258158 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-public-tls-certs\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.258174 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-internal-tls-certs\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.258232 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-config-data\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.258260 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-config-data-custom\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.260056 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/66e282d8-f6fc-4c9b-84e2-398efd252579-logs\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.269605 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-public-tls-certs\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.272372 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-internal-tls-certs\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.273085 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-combined-ca-bundle\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.274758 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-config-data\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.283897 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/66e282d8-f6fc-4c9b-84e2-398efd252579-config-data-custom\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.298854 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbtb8\" (UniqueName: \"kubernetes.io/projected/66e282d8-f6fc-4c9b-84e2-398efd252579-kube-api-access-dbtb8\") pod \"barbican-api-6b6b9785cb-jncbj\" (UID: \"66e282d8-f6fc-4c9b-84e2-398efd252579\") " pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.478129 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.536516 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 20:23:10 crc kubenswrapper[4852]: I1201 20:23:10.536737 4852 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 20:23:11 crc kubenswrapper[4852]: I1201 20:23:11.054203 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6b6b9785cb-jncbj"] Dec 01 20:23:11 crc kubenswrapper[4852]: I1201 20:23:11.305423 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 20:23:12 crc kubenswrapper[4852]: I1201 20:23:12.204251 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b6b9785cb-jncbj" event={"ID":"66e282d8-f6fc-4c9b-84e2-398efd252579","Type":"ContainerStarted","Data":"1b464651c40a7b906622695c1162c12950ca4ca9dab40676460a31f6f890db91"} Dec 01 20:23:12 crc kubenswrapper[4852]: I1201 20:23:12.207514 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ml69l" event={"ID":"b531141a-eca1-4f9f-a67a-68d48d92add9","Type":"ContainerStarted","Data":"c95cc92f4d0bf963052b855977bad0e2abdf2a157fccf7ed9732875bfc4d099b"} Dec 01 20:23:12 crc kubenswrapper[4852]: I1201 20:23:12.230875 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-ml69l" podStartSLOduration=4.528047144 podStartE2EDuration="48.230846344s" podCreationTimestamp="2025-12-01 20:22:24 +0000 UTC" firstStartedPulling="2025-12-01 20:22:26.316900981 +0000 UTC m=+1066.243982398" lastFinishedPulling="2025-12-01 20:23:10.019700181 +0000 UTC m=+1109.946781598" observedRunningTime="2025-12-01 20:23:12.230590666 +0000 UTC m=+1112.157672093" watchObservedRunningTime="2025-12-01 20:23:12.230846344 +0000 UTC m=+1112.157927761" Dec 01 20:23:13 crc kubenswrapper[4852]: I1201 20:23:13.220438 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b6b9785cb-jncbj" event={"ID":"66e282d8-f6fc-4c9b-84e2-398efd252579","Type":"ContainerStarted","Data":"40a5b403389f7fb2ac336462b9bb8395d4d980f214f69679973225612df1b56a"} Dec 01 20:23:13 crc kubenswrapper[4852]: I1201 20:23:13.221411 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b6b9785cb-jncbj" event={"ID":"66e282d8-f6fc-4c9b-84e2-398efd252579","Type":"ContainerStarted","Data":"f3b0113fe216b657f757affdada48cf818b568166a616c9e7a10decc653c1760"} Dec 01 20:23:13 crc kubenswrapper[4852]: I1201 20:23:13.227874 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5dcb96d78f-vvtc5" event={"ID":"6ec762a2-a422-40fd-91a8-fdaf58be343c","Type":"ContainerStarted","Data":"fb318d23050d3b4ffec8143749db57ce837b6fcd1c75d2e8be704569abb63536"} Dec 01 20:23:13 crc kubenswrapper[4852]: I1201 20:23:13.228052 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5dcb96d78f-vvtc5" event={"ID":"6ec762a2-a422-40fd-91a8-fdaf58be343c","Type":"ContainerStarted","Data":"4932cf1966227dad7bf12151bb798ea07b64682a7ab5b9f5b7a544051214e4bf"} Dec 01 20:23:13 crc kubenswrapper[4852]: I1201 20:23:13.230629 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh" 
event={"ID":"d6030470-6a0e-43fc-ae0c-755a3d4a9980","Type":"ContainerStarted","Data":"08f5f6a512f204767be7fb6fdcc94fb99cb600cd5ee0ed59a7705701163e902b"} Dec 01 20:23:13 crc kubenswrapper[4852]: I1201 20:23:13.230933 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh" event={"ID":"d6030470-6a0e-43fc-ae0c-755a3d4a9980","Type":"ContainerStarted","Data":"400e3d12fce396f4bc33bcd970ccab1cc919a44ff78a1a3819693364b19f6947"} Dec 01 20:23:13 crc kubenswrapper[4852]: I1201 20:23:13.245636 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6b6b9785cb-jncbj" podStartSLOduration=4.2456127200000005 podStartE2EDuration="4.24561272s" podCreationTimestamp="2025-12-01 20:23:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:13.241430778 +0000 UTC m=+1113.168512195" watchObservedRunningTime="2025-12-01 20:23:13.24561272 +0000 UTC m=+1113.172694137" Dec 01 20:23:13 crc kubenswrapper[4852]: I1201 20:23:13.264432 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6ffc9cc59d-mbzlh" podStartSLOduration=3.117195129 podStartE2EDuration="7.26441097s" podCreationTimestamp="2025-12-01 20:23:06 +0000 UTC" firstStartedPulling="2025-12-01 20:23:07.866097815 +0000 UTC m=+1107.793179232" lastFinishedPulling="2025-12-01 20:23:12.013313656 +0000 UTC m=+1111.940395073" observedRunningTime="2025-12-01 20:23:13.261817459 +0000 UTC m=+1113.188898876" watchObservedRunningTime="2025-12-01 20:23:13.26441097 +0000 UTC m=+1113.191492387" Dec 01 20:23:13 crc kubenswrapper[4852]: I1201 20:23:13.290652 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5dcb96d78f-vvtc5" podStartSLOduration=3.113654539 podStartE2EDuration="7.290620693s" podCreationTimestamp="2025-12-01 20:23:06 +0000 UTC" firstStartedPulling="2025-12-01 20:23:07.866353173 +0000 UTC m=+1107.793434590" lastFinishedPulling="2025-12-01 20:23:12.043319327 +0000 UTC m=+1111.970400744" observedRunningTime="2025-12-01 20:23:13.278157691 +0000 UTC m=+1113.205239108" watchObservedRunningTime="2025-12-01 20:23:13.290620693 +0000 UTC m=+1113.217702110" Dec 01 20:23:14 crc kubenswrapper[4852]: I1201 20:23:14.245075 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-d7844c8bb-bfdj8" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 01 20:23:14 crc kubenswrapper[4852]: I1201 20:23:14.245115 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:14 crc kubenswrapper[4852]: I1201 20:23:14.245239 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:14 crc kubenswrapper[4852]: I1201 20:23:14.372232 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-dfd6f888-xxwbg" podUID="160a77b2-5ec6-4223-b939-8e90b339f530" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Dec 01 20:23:16 crc kubenswrapper[4852]: I1201 20:23:16.270222 4852 generic.go:334] "Generic 
(PLEG): container finished" podID="b531141a-eca1-4f9f-a67a-68d48d92add9" containerID="c95cc92f4d0bf963052b855977bad0e2abdf2a157fccf7ed9732875bfc4d099b" exitCode=0 Dec 01 20:23:16 crc kubenswrapper[4852]: I1201 20:23:16.270318 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ml69l" event={"ID":"b531141a-eca1-4f9f-a67a-68d48d92add9","Type":"ContainerDied","Data":"c95cc92f4d0bf963052b855977bad0e2abdf2a157fccf7ed9732875bfc4d099b"} Dec 01 20:23:16 crc kubenswrapper[4852]: I1201 20:23:16.811855 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" Dec 01 20:23:16 crc kubenswrapper[4852]: I1201 20:23:16.893428 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f8dc44d89-b8cwd"] Dec 01 20:23:16 crc kubenswrapper[4852]: I1201 20:23:16.893742 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" podUID="c1747b3e-a948-4dda-9ab3-403d5e33cee4" containerName="dnsmasq-dns" containerID="cri-o://585ef0f958916db0b6897aea839553c6018c4cb4aec6f533caf661087ff6d8f2" gracePeriod=10 Dec 01 20:23:17 crc kubenswrapper[4852]: I1201 20:23:17.305722 4852 generic.go:334] "Generic (PLEG): container finished" podID="c1747b3e-a948-4dda-9ab3-403d5e33cee4" containerID="585ef0f958916db0b6897aea839553c6018c4cb4aec6f533caf661087ff6d8f2" exitCode=0 Dec 01 20:23:17 crc kubenswrapper[4852]: I1201 20:23:17.306008 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" event={"ID":"c1747b3e-a948-4dda-9ab3-403d5e33cee4","Type":"ContainerDied","Data":"585ef0f958916db0b6897aea839553c6018c4cb4aec6f533caf661087ff6d8f2"} Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.814567 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-ml69l" Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.921733 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-config-data\") pod \"b531141a-eca1-4f9f-a67a-68d48d92add9\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.921838 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-db-sync-config-data\") pod \"b531141a-eca1-4f9f-a67a-68d48d92add9\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.921955 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-combined-ca-bundle\") pod \"b531141a-eca1-4f9f-a67a-68d48d92add9\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.922080 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b531141a-eca1-4f9f-a67a-68d48d92add9-etc-machine-id\") pod \"b531141a-eca1-4f9f-a67a-68d48d92add9\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.922179 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-scripts\") pod \"b531141a-eca1-4f9f-a67a-68d48d92add9\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.922214 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-762k9\" (UniqueName: \"kubernetes.io/projected/b531141a-eca1-4f9f-a67a-68d48d92add9-kube-api-access-762k9\") pod \"b531141a-eca1-4f9f-a67a-68d48d92add9\" (UID: \"b531141a-eca1-4f9f-a67a-68d48d92add9\") " Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.928584 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b531141a-eca1-4f9f-a67a-68d48d92add9-kube-api-access-762k9" (OuterVolumeSpecName: "kube-api-access-762k9") pod "b531141a-eca1-4f9f-a67a-68d48d92add9" (UID: "b531141a-eca1-4f9f-a67a-68d48d92add9"). InnerVolumeSpecName "kube-api-access-762k9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.928626 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b531141a-eca1-4f9f-a67a-68d48d92add9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b531141a-eca1-4f9f-a67a-68d48d92add9" (UID: "b531141a-eca1-4f9f-a67a-68d48d92add9"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.936122 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b531141a-eca1-4f9f-a67a-68d48d92add9" (UID: "b531141a-eca1-4f9f-a67a-68d48d92add9"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:18 crc kubenswrapper[4852]: I1201 20:23:18.939893 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-scripts" (OuterVolumeSpecName: "scripts") pod "b531141a-eca1-4f9f-a67a-68d48d92add9" (UID: "b531141a-eca1-4f9f-a67a-68d48d92add9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.002732 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b531141a-eca1-4f9f-a67a-68d48d92add9" (UID: "b531141a-eca1-4f9f-a67a-68d48d92add9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.022857 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-config-data" (OuterVolumeSpecName: "config-data") pod "b531141a-eca1-4f9f-a67a-68d48d92add9" (UID: "b531141a-eca1-4f9f-a67a-68d48d92add9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.027844 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.027895 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-762k9\" (UniqueName: \"kubernetes.io/projected/b531141a-eca1-4f9f-a67a-68d48d92add9-kube-api-access-762k9\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.027911 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.027926 4852 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.027939 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b531141a-eca1-4f9f-a67a-68d48d92add9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.027951 4852 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b531141a-eca1-4f9f-a67a-68d48d92add9-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: E1201 20:23:19.112143 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.140608 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.333416 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa21b178-0e31-4c64-a0d7-59e622aa958e","Type":"ContainerStarted","Data":"3af7518518380d5a325a7727ed537101bf669e44095130fcfe4a89a698311075"} Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.333680 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="ceilometer-notification-agent" containerID="cri-o://0fda5311e5eb48480c323d4b3199e1648266ed56df1a435a9b4ca4fb07737ffb" gracePeriod=30 Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.333772 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.334477 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="proxy-httpd" containerID="cri-o://3af7518518380d5a325a7727ed537101bf669e44095130fcfe4a89a698311075" gracePeriod=30 Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.334542 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="sg-core" containerID="cri-o://04badb7a966d4ff03a4b686961f518ba22e8ea9c194801038b413442511cae89" gracePeriod=30 Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.337316 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-swift-storage-0\") pod \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.337358 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njlpd\" (UniqueName: \"kubernetes.io/projected/c1747b3e-a948-4dda-9ab3-403d5e33cee4-kube-api-access-njlpd\") pod \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.337578 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-config\") pod \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.337628 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-sb\") pod \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.337664 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-nb\") pod \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.337771 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-svc\") pod \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\" (UID: \"c1747b3e-a948-4dda-9ab3-403d5e33cee4\") " Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.349020 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ml69l" event={"ID":"b531141a-eca1-4f9f-a67a-68d48d92add9","Type":"ContainerDied","Data":"97bd0243d04ca5155ad14ef2a3874c0574111edc3898fab28f0768c649900826"} Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.349084 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97bd0243d04ca5155ad14ef2a3874c0574111edc3898fab28f0768c649900826" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.349167 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-ml69l" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.383845 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" event={"ID":"c1747b3e-a948-4dda-9ab3-403d5e33cee4","Type":"ContainerDied","Data":"4cda01031081d89598cd11126089e72dae823e7ccf0a2ba23bd265e6738207d8"} Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.383962 4852 scope.go:117] "RemoveContainer" containerID="585ef0f958916db0b6897aea839553c6018c4cb4aec6f533caf661087ff6d8f2" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.384202 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f8dc44d89-b8cwd" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.387724 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1747b3e-a948-4dda-9ab3-403d5e33cee4-kube-api-access-njlpd" (OuterVolumeSpecName: "kube-api-access-njlpd") pod "c1747b3e-a948-4dda-9ab3-403d5e33cee4" (UID: "c1747b3e-a948-4dda-9ab3-403d5e33cee4"). InnerVolumeSpecName "kube-api-access-njlpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.419670 4852 scope.go:117] "RemoveContainer" containerID="b0648aad07c004b472b19d9d93114a64831a810dbe2cc80cd9047cf7579f0dcd" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.424636 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c1747b3e-a948-4dda-9ab3-403d5e33cee4" (UID: "c1747b3e-a948-4dda-9ab3-403d5e33cee4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.431492 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c1747b3e-a948-4dda-9ab3-403d5e33cee4" (UID: "c1747b3e-a948-4dda-9ab3-403d5e33cee4"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.440180 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.440222 4852 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.440233 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njlpd\" (UniqueName: \"kubernetes.io/projected/c1747b3e-a948-4dda-9ab3-403d5e33cee4-kube-api-access-njlpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.451654 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c1747b3e-a948-4dda-9ab3-403d5e33cee4" (UID: "c1747b3e-a948-4dda-9ab3-403d5e33cee4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.452046 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c1747b3e-a948-4dda-9ab3-403d5e33cee4" (UID: "c1747b3e-a948-4dda-9ab3-403d5e33cee4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.471731 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-config" (OuterVolumeSpecName: "config") pod "c1747b3e-a948-4dda-9ab3-403d5e33cee4" (UID: "c1747b3e-a948-4dda-9ab3-403d5e33cee4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.544318 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.544372 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.544383 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1747b3e-a948-4dda-9ab3-403d5e33cee4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.572195 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.842069 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f8dc44d89-b8cwd"] Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.845054 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:19 crc kubenswrapper[4852]: I1201 20:23:19.860610 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f8dc44d89-b8cwd"] Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.067834 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 20:23:20 crc kubenswrapper[4852]: E1201 20:23:20.068572 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b531141a-eca1-4f9f-a67a-68d48d92add9" containerName="cinder-db-sync" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.068641 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b531141a-eca1-4f9f-a67a-68d48d92add9" containerName="cinder-db-sync" Dec 01 20:23:20 crc kubenswrapper[4852]: E1201 20:23:20.068741 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1747b3e-a948-4dda-9ab3-403d5e33cee4" containerName="dnsmasq-dns" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.068791 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1747b3e-a948-4dda-9ab3-403d5e33cee4" containerName="dnsmasq-dns" Dec 01 20:23:20 crc kubenswrapper[4852]: E1201 20:23:20.068910 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1747b3e-a948-4dda-9ab3-403d5e33cee4" containerName="init" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.068966 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1747b3e-a948-4dda-9ab3-403d5e33cee4" containerName="init" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.069200 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1747b3e-a948-4dda-9ab3-403d5e33cee4" containerName="dnsmasq-dns" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.072486 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="b531141a-eca1-4f9f-a67a-68d48d92add9" containerName="cinder-db-sync" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.074423 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.085725 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-cqj4j" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.089777 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.090221 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.096373 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.152422 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.198669 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.198730 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88mcl\" (UniqueName: \"kubernetes.io/projected/0721155d-0b83-4767-8d74-492384444e68-kube-api-access-88mcl\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.198878 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0721155d-0b83-4767-8d74-492384444e68-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.198915 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-scripts\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.198938 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.199182 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.232415 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.233254 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.250681 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c55f6679-z26fd"] Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.253082 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.275177 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c55f6679-z26fd"] Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.315617 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.315684 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88mcl\" (UniqueName: \"kubernetes.io/projected/0721155d-0b83-4767-8d74-492384444e68-kube-api-access-88mcl\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.315746 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0721155d-0b83-4767-8d74-492384444e68-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.315765 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-scripts\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.315784 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.315866 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.316667 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0721155d-0b83-4767-8d74-492384444e68-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.334035 4852 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.348275 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.352121 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.366766 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88mcl\" (UniqueName: \"kubernetes.io/projected/0721155d-0b83-4767-8d74-492384444e68-kube-api-access-88mcl\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.374089 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-scripts\") pod \"cinder-scheduler-0\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.404807 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1747b3e-a948-4dda-9ab3-403d5e33cee4" path="/var/lib/kubelet/pods/c1747b3e-a948-4dda-9ab3-403d5e33cee4/volumes" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.423400 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-nb\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.423509 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nxwm\" (UniqueName: \"kubernetes.io/projected/119db55f-ccc9-47b3-b81c-c7907841b276-kube-api-access-4nxwm\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.423543 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-config\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.423565 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-svc\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " 
pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.423589 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-sb\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.423717 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-swift-storage-0\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.455949 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.467538 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.469360 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.473572 4852 generic.go:334] "Generic (PLEG): container finished" podID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerID="3af7518518380d5a325a7727ed537101bf669e44095130fcfe4a89a698311075" exitCode=0 Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.473624 4852 generic.go:334] "Generic (PLEG): container finished" podID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerID="04badb7a966d4ff03a4b686961f518ba22e8ea9c194801038b413442511cae89" exitCode=2 Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.473868 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa21b178-0e31-4c64-a0d7-59e622aa958e","Type":"ContainerDied","Data":"3af7518518380d5a325a7727ed537101bf669e44095130fcfe4a89a698311075"} Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.473955 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa21b178-0e31-4c64-a0d7-59e622aa958e","Type":"ContainerDied","Data":"04badb7a966d4ff03a4b686961f518ba22e8ea9c194801038b413442511cae89"} Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.477523 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.497579 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.556584 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nxwm\" (UniqueName: \"kubernetes.io/projected/119db55f-ccc9-47b3-b81c-c7907841b276-kube-api-access-4nxwm\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.556692 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-config\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc 
kubenswrapper[4852]: I1201 20:23:20.556726 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-svc\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.556777 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-sb\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.556997 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-swift-storage-0\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.557119 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-nb\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.558446 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-nb\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.564556 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-config\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.565178 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-svc\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.565830 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-sb\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.570573 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-swift-storage-0\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.662698 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data-custom\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.662865 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-scripts\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.662885 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.662915 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccj69\" (UniqueName: \"kubernetes.io/projected/dadbe376-cf87-43c0-9774-b85e0cadfe55-kube-api-access-ccj69\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.662944 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dadbe376-cf87-43c0-9774-b85e0cadfe55-etc-machine-id\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.662978 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dadbe376-cf87-43c0-9774-b85e0cadfe55-logs\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.663003 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.677075 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nxwm\" (UniqueName: \"kubernetes.io/projected/119db55f-ccc9-47b3-b81c-c7907841b276-kube-api-access-4nxwm\") pod \"dnsmasq-dns-c55f6679-z26fd\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.764904 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-scripts\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.764955 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc 
kubenswrapper[4852]: I1201 20:23:20.764988 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccj69\" (UniqueName: \"kubernetes.io/projected/dadbe376-cf87-43c0-9774-b85e0cadfe55-kube-api-access-ccj69\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.765024 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dadbe376-cf87-43c0-9774-b85e0cadfe55-etc-machine-id\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.765052 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dadbe376-cf87-43c0-9774-b85e0cadfe55-logs\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.765075 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.765113 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data-custom\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.767084 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dadbe376-cf87-43c0-9774-b85e0cadfe55-logs\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.767183 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dadbe376-cf87-43c0-9774-b85e0cadfe55-etc-machine-id\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.780238 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.782301 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data-custom\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.785631 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 
20:23:20.788738 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-scripts\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.792102 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccj69\" (UniqueName: \"kubernetes.io/projected/dadbe376-cf87-43c0-9774-b85e0cadfe55-kube-api-access-ccj69\") pod \"cinder-api-0\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.885189 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 20:23:20 crc kubenswrapper[4852]: I1201 20:23:20.897838 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:21 crc kubenswrapper[4852]: I1201 20:23:21.353937 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 20:23:21 crc kubenswrapper[4852]: W1201 20:23:21.369618 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0721155d_0b83_4767_8d74_492384444e68.slice/crio-ded8999d4ccd9083427943a4aec5dd5d37378e5732fc59a29fbe8bbd20da27b4 WatchSource:0}: Error finding container ded8999d4ccd9083427943a4aec5dd5d37378e5732fc59a29fbe8bbd20da27b4: Status 404 returned error can't find the container with id ded8999d4ccd9083427943a4aec5dd5d37378e5732fc59a29fbe8bbd20da27b4 Dec 01 20:23:21 crc kubenswrapper[4852]: I1201 20:23:21.502693 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0721155d-0b83-4767-8d74-492384444e68","Type":"ContainerStarted","Data":"ded8999d4ccd9083427943a4aec5dd5d37378e5732fc59a29fbe8bbd20da27b4"} Dec 01 20:23:21 crc kubenswrapper[4852]: I1201 20:23:21.677606 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 20:23:21 crc kubenswrapper[4852]: I1201 20:23:21.751297 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c55f6679-z26fd"] Dec 01 20:23:22 crc kubenswrapper[4852]: I1201 20:23:22.567531 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"dadbe376-cf87-43c0-9774-b85e0cadfe55","Type":"ContainerStarted","Data":"c3b127e5294b99daaf4f4e4eaabb0f38d13818e5ac1e000d3fe9dea29b5b800a"} Dec 01 20:23:22 crc kubenswrapper[4852]: I1201 20:23:22.583585 4852 generic.go:334] "Generic (PLEG): container finished" podID="119db55f-ccc9-47b3-b81c-c7907841b276" containerID="cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b" exitCode=0 Dec 01 20:23:22 crc kubenswrapper[4852]: I1201 20:23:22.583727 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c55f6679-z26fd" event={"ID":"119db55f-ccc9-47b3-b81c-c7907841b276","Type":"ContainerDied","Data":"cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b"} Dec 01 20:23:22 crc kubenswrapper[4852]: I1201 20:23:22.583770 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c55f6679-z26fd" event={"ID":"119db55f-ccc9-47b3-b81c-c7907841b276","Type":"ContainerStarted","Data":"46ff4c3617bf8798d6a0822398d5310165f885ce1591490b819fade80a8e6ebf"} Dec 01 20:23:22 crc kubenswrapper[4852]: I1201 20:23:22.597619 4852 
generic.go:334] "Generic (PLEG): container finished" podID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerID="0fda5311e5eb48480c323d4b3199e1648266ed56df1a435a9b4ca4fb07737ffb" exitCode=0 Dec 01 20:23:22 crc kubenswrapper[4852]: I1201 20:23:22.597683 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa21b178-0e31-4c64-a0d7-59e622aa958e","Type":"ContainerDied","Data":"0fda5311e5eb48480c323d4b3199e1648266ed56df1a435a9b4ca4fb07737ffb"} Dec 01 20:23:22 crc kubenswrapper[4852]: I1201 20:23:22.647853 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.117904 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.178323 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.190406 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-sg-core-conf-yaml\") pod \"fa21b178-0e31-4c64-a0d7-59e622aa958e\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.231675 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fa21b178-0e31-4c64-a0d7-59e622aa958e" (UID: "fa21b178-0e31-4c64-a0d7-59e622aa958e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.293738 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fsvs\" (UniqueName: \"kubernetes.io/projected/fa21b178-0e31-4c64-a0d7-59e622aa958e-kube-api-access-4fsvs\") pod \"fa21b178-0e31-4c64-a0d7-59e622aa958e\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.293862 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-log-httpd\") pod \"fa21b178-0e31-4c64-a0d7-59e622aa958e\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.294052 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-config-data\") pod \"fa21b178-0e31-4c64-a0d7-59e622aa958e\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.294084 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-scripts\") pod \"fa21b178-0e31-4c64-a0d7-59e622aa958e\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.294135 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-run-httpd\") pod \"fa21b178-0e31-4c64-a0d7-59e622aa958e\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " Dec 01 20:23:23 crc 
kubenswrapper[4852]: I1201 20:23:23.294160 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-combined-ca-bundle\") pod \"fa21b178-0e31-4c64-a0d7-59e622aa958e\" (UID: \"fa21b178-0e31-4c64-a0d7-59e622aa958e\") " Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.294744 4852 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.296233 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fa21b178-0e31-4c64-a0d7-59e622aa958e" (UID: "fa21b178-0e31-4c64-a0d7-59e622aa958e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.296849 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fa21b178-0e31-4c64-a0d7-59e622aa958e" (UID: "fa21b178-0e31-4c64-a0d7-59e622aa958e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.302256 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-scripts" (OuterVolumeSpecName: "scripts") pod "fa21b178-0e31-4c64-a0d7-59e622aa958e" (UID: "fa21b178-0e31-4c64-a0d7-59e622aa958e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.311722 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa21b178-0e31-4c64-a0d7-59e622aa958e-kube-api-access-4fsvs" (OuterVolumeSpecName: "kube-api-access-4fsvs") pod "fa21b178-0e31-4c64-a0d7-59e622aa958e" (UID: "fa21b178-0e31-4c64-a0d7-59e622aa958e"). InnerVolumeSpecName "kube-api-access-4fsvs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.397962 4852 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.398343 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.398353 4852 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa21b178-0e31-4c64-a0d7-59e622aa958e-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.398362 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fsvs\" (UniqueName: \"kubernetes.io/projected/fa21b178-0e31-4c64-a0d7-59e622aa958e-kube-api-access-4fsvs\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.404630 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa21b178-0e31-4c64-a0d7-59e622aa958e" (UID: "fa21b178-0e31-4c64-a0d7-59e622aa958e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.417203 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6b6b9785cb-jncbj" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.430600 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-config-data" (OuterVolumeSpecName: "config-data") pod "fa21b178-0e31-4c64-a0d7-59e622aa958e" (UID: "fa21b178-0e31-4c64-a0d7-59e622aa958e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.501945 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.501993 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa21b178-0e31-4c64-a0d7-59e622aa958e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.513118 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-95bdd6c68-zqjgq"] Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.513407 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-95bdd6c68-zqjgq" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerName="barbican-api-log" containerID="cri-o://46abf8cba3f5f8ca1faad6a8c48325ab13918dc745dea9651dfcb773194b1662" gracePeriod=30 Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.514136 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-95bdd6c68-zqjgq" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerName="barbican-api" containerID="cri-o://5f5d00484af4034237001a135aadcdfe440886eb64ce1e86eebf37c1c959f96c" gracePeriod=30 Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.529278 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-95bdd6c68-zqjgq" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": EOF" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.651062 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"dadbe376-cf87-43c0-9774-b85e0cadfe55","Type":"ContainerStarted","Data":"807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231"} Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.654101 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c55f6679-z26fd" event={"ID":"119db55f-ccc9-47b3-b81c-c7907841b276","Type":"ContainerStarted","Data":"08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff"} Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.655559 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.666536 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.666708 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa21b178-0e31-4c64-a0d7-59e622aa958e","Type":"ContainerDied","Data":"7677c355aae1352fb2227378bf3d7c0d21a1e06b990d310e22e462a86137eddd"} Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.666762 4852 scope.go:117] "RemoveContainer" containerID="3af7518518380d5a325a7727ed537101bf669e44095130fcfe4a89a698311075" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.681275 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-c55f6679-z26fd" podStartSLOduration=3.681253487 podStartE2EDuration="3.681253487s" podCreationTimestamp="2025-12-01 20:23:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:23.674356311 +0000 UTC m=+1123.601437728" watchObservedRunningTime="2025-12-01 20:23:23.681253487 +0000 UTC m=+1123.608334904" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.766702 4852 scope.go:117] "RemoveContainer" containerID="04badb7a966d4ff03a4b686961f518ba22e8ea9c194801038b413442511cae89" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.774533 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.790895 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.805148 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:23 crc kubenswrapper[4852]: E1201 20:23:23.805870 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="proxy-httpd" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.805895 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="proxy-httpd" Dec 01 20:23:23 crc kubenswrapper[4852]: E1201 20:23:23.805957 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="ceilometer-notification-agent" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.805969 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="ceilometer-notification-agent" Dec 01 20:23:23 crc kubenswrapper[4852]: E1201 20:23:23.806023 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="sg-core" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.806033 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="sg-core" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.806330 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="sg-core" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.806354 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="ceilometer-notification-agent" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.806373 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" containerName="proxy-httpd" Dec 01 20:23:23 crc 
kubenswrapper[4852]: I1201 20:23:23.809679 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.817401 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.832583 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.832654 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 20:23:23 crc kubenswrapper[4852]: I1201 20:23:23.860936 4852 scope.go:117] "RemoveContainer" containerID="0fda5311e5eb48480c323d4b3199e1648266ed56df1a435a9b4ca4fb07737ffb" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.014122 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgdll\" (UniqueName: \"kubernetes.io/projected/36183a17-928d-43a5-b1ac-7b8ebb8335c3-kube-api-access-sgdll\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.014225 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-config-data\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.014257 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-scripts\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.014278 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-log-httpd\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.014316 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-run-httpd\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.014341 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.014371 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.118642 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-config-data\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.119412 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-scripts\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.119463 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-log-httpd\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.119535 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-run-httpd\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.119567 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.119605 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.119678 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgdll\" (UniqueName: \"kubernetes.io/projected/36183a17-928d-43a5-b1ac-7b8ebb8335c3-kube-api-access-sgdll\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.122363 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-run-httpd\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.122840 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-log-httpd\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.136039 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.153064 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.153212 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-scripts\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.153914 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-config-data\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.155298 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgdll\" (UniqueName: \"kubernetes.io/projected/36183a17-928d-43a5-b1ac-7b8ebb8335c3-kube-api-access-sgdll\") pod \"ceilometer-0\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.168107 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.350054 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa21b178-0e31-4c64-a0d7-59e622aa958e" path="/var/lib/kubelet/pods/fa21b178-0e31-4c64-a0d7-59e622aa958e/volumes" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.372865 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-dfd6f888-xxwbg" podUID="160a77b2-5ec6-4223-b939-8e90b339f530" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.605566 4852 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod2ea8042f-95a3-43d9-a653-6c61fc239d8e"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod2ea8042f-95a3-43d9-a653-6c61fc239d8e] : Timed out while waiting for systemd to remove kubepods-besteffort-pod2ea8042f_95a3_43d9_a653_6c61fc239d8e.slice" Dec 01 20:23:24 crc kubenswrapper[4852]: E1201 20:23:24.605634 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod2ea8042f-95a3-43d9-a653-6c61fc239d8e] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod2ea8042f-95a3-43d9-a653-6c61fc239d8e] : Timed out while waiting for systemd to remove kubepods-besteffort-pod2ea8042f_95a3_43d9_a653_6c61fc239d8e.slice" pod="openstack/neutron-db-sync-k8mp8" podUID="2ea8042f-95a3-43d9-a653-6c61fc239d8e" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.685289 4852 generic.go:334] "Generic (PLEG): container finished" podID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerID="46abf8cba3f5f8ca1faad6a8c48325ab13918dc745dea9651dfcb773194b1662" exitCode=143 Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.685872 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-95bdd6c68-zqjgq" 
event={"ID":"aa9160bb-32cb-4617-8ecf-3ca078d2008a","Type":"ContainerDied","Data":"46abf8cba3f5f8ca1faad6a8c48325ab13918dc745dea9651dfcb773194b1662"} Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.695044 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0721155d-0b83-4767-8d74-492384444e68","Type":"ContainerStarted","Data":"086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461"} Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.701769 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="dadbe376-cf87-43c0-9774-b85e0cadfe55" containerName="cinder-api-log" containerID="cri-o://807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231" gracePeriod=30 Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.702056 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"dadbe376-cf87-43c0-9774-b85e0cadfe55","Type":"ContainerStarted","Data":"bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819"} Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.702124 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-k8mp8" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.702599 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.702938 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="dadbe376-cf87-43c0-9774-b85e0cadfe55" containerName="cinder-api" containerID="cri-o://bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819" gracePeriod=30 Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.756258 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.7562155619999995 podStartE2EDuration="4.756215562s" podCreationTimestamp="2025-12-01 20:23:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:24.721584696 +0000 UTC m=+1124.648666123" watchObservedRunningTime="2025-12-01 20:23:24.756215562 +0000 UTC m=+1124.683296979" Dec 01 20:23:24 crc kubenswrapper[4852]: I1201 20:23:24.778645 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.428906 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.602765 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.732798 4852 generic.go:334] "Generic (PLEG): container finished" podID="c0d059ae-91d8-40a7-862b-7204dfc0b420" containerID="5d2857c8b031fdea53b6b5866e39c037c604cccb04aa3788684953fa00c5dccf" exitCode=137 Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.732838 4852 generic.go:334] "Generic (PLEG): container finished" podID="c0d059ae-91d8-40a7-862b-7204dfc0b420" containerID="49c0bc829ec33a751d6ae77a82a9bf7cecdc0e6f8422bf140aa462f06e420073" exitCode=137 Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.732917 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-768f876787-6snnf" event={"ID":"c0d059ae-91d8-40a7-862b-7204dfc0b420","Type":"ContainerDied","Data":"5d2857c8b031fdea53b6b5866e39c037c604cccb04aa3788684953fa00c5dccf"} Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.732955 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-768f876787-6snnf" event={"ID":"c0d059ae-91d8-40a7-862b-7204dfc0b420","Type":"ContainerDied","Data":"49c0bc829ec33a751d6ae77a82a9bf7cecdc0e6f8422bf140aa462f06e420073"} Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.739801 4852 generic.go:334] "Generic (PLEG): container finished" podID="dadbe376-cf87-43c0-9774-b85e0cadfe55" containerID="bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819" exitCode=0 Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.739845 4852 generic.go:334] "Generic (PLEG): container finished" podID="dadbe376-cf87-43c0-9774-b85e0cadfe55" containerID="807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231" exitCode=143 Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.740022 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.740990 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"dadbe376-cf87-43c0-9774-b85e0cadfe55","Type":"ContainerDied","Data":"bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819"} Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.741033 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"dadbe376-cf87-43c0-9774-b85e0cadfe55","Type":"ContainerDied","Data":"807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231"} Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.741046 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"dadbe376-cf87-43c0-9774-b85e0cadfe55","Type":"ContainerDied","Data":"c3b127e5294b99daaf4f4e4eaabb0f38d13818e5ac1e000d3fe9dea29b5b800a"} Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.741064 4852 scope.go:117] "RemoveContainer" containerID="bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.752234 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36183a17-928d-43a5-b1ac-7b8ebb8335c3","Type":"ContainerStarted","Data":"076a34cc9b47f587b71aae53dd10933e40570cd25b0166c6f0cb20e675a97eec"} Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.756035 4852 generic.go:334] "Generic (PLEG): container finished" podID="8cdd0646-f58e-47b0-b562-1db787dd489d" containerID="ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50" exitCode=137 Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.756347 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bd8d6ff69-cmc78" event={"ID":"8cdd0646-f58e-47b0-b562-1db787dd489d","Type":"ContainerDied","Data":"ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50"} Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.762656 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0721155d-0b83-4767-8d74-492384444e68","Type":"ContainerStarted","Data":"5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7"} Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.789900 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.256422413 podStartE2EDuration="5.789875861s" podCreationTimestamp="2025-12-01 20:23:20 +0000 UTC" firstStartedPulling="2025-12-01 20:23:21.395700019 +0000 UTC m=+1121.322781436" lastFinishedPulling="2025-12-01 20:23:22.929153457 +0000 UTC m=+1122.856234884" observedRunningTime="2025-12-01 20:23:25.785022599 +0000 UTC m=+1125.712104026" watchObservedRunningTime="2025-12-01 20:23:25.789875861 +0000 UTC m=+1125.716957278" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.798949 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-scripts\") pod \"dadbe376-cf87-43c0-9774-b85e0cadfe55\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.799161 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ccj69\" (UniqueName: \"kubernetes.io/projected/dadbe376-cf87-43c0-9774-b85e0cadfe55-kube-api-access-ccj69\") pod 
\"dadbe376-cf87-43c0-9774-b85e0cadfe55\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.799296 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data\") pod \"dadbe376-cf87-43c0-9774-b85e0cadfe55\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.799423 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-combined-ca-bundle\") pod \"dadbe376-cf87-43c0-9774-b85e0cadfe55\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.799532 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dadbe376-cf87-43c0-9774-b85e0cadfe55-logs\") pod \"dadbe376-cf87-43c0-9774-b85e0cadfe55\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.799713 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dadbe376-cf87-43c0-9774-b85e0cadfe55-etc-machine-id\") pod \"dadbe376-cf87-43c0-9774-b85e0cadfe55\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.799811 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data-custom\") pod \"dadbe376-cf87-43c0-9774-b85e0cadfe55\" (UID: \"dadbe376-cf87-43c0-9774-b85e0cadfe55\") " Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.804894 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dadbe376-cf87-43c0-9774-b85e0cadfe55-logs" (OuterVolumeSpecName: "logs") pod "dadbe376-cf87-43c0-9774-b85e0cadfe55" (UID: "dadbe376-cf87-43c0-9774-b85e0cadfe55"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.804937 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dadbe376-cf87-43c0-9774-b85e0cadfe55-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "dadbe376-cf87-43c0-9774-b85e0cadfe55" (UID: "dadbe376-cf87-43c0-9774-b85e0cadfe55"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.805247 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "dadbe376-cf87-43c0-9774-b85e0cadfe55" (UID: "dadbe376-cf87-43c0-9774-b85e0cadfe55"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.814823 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dadbe376-cf87-43c0-9774-b85e0cadfe55-kube-api-access-ccj69" (OuterVolumeSpecName: "kube-api-access-ccj69") pod "dadbe376-cf87-43c0-9774-b85e0cadfe55" (UID: "dadbe376-cf87-43c0-9774-b85e0cadfe55"). InnerVolumeSpecName "kube-api-access-ccj69". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.818831 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-scripts" (OuterVolumeSpecName: "scripts") pod "dadbe376-cf87-43c0-9774-b85e0cadfe55" (UID: "dadbe376-cf87-43c0-9774-b85e0cadfe55"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.902067 4852 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.902541 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.902553 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ccj69\" (UniqueName: \"kubernetes.io/projected/dadbe376-cf87-43c0-9774-b85e0cadfe55-kube-api-access-ccj69\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.902564 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dadbe376-cf87-43c0-9774-b85e0cadfe55-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.902574 4852 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dadbe376-cf87-43c0-9774-b85e0cadfe55-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.904781 4852 scope.go:117] "RemoveContainer" containerID="807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.937663 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data" (OuterVolumeSpecName: "config-data") pod "dadbe376-cf87-43c0-9774-b85e0cadfe55" (UID: "dadbe376-cf87-43c0-9774-b85e0cadfe55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:25 crc kubenswrapper[4852]: I1201 20:23:25.950374 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dadbe376-cf87-43c0-9774-b85e0cadfe55" (UID: "dadbe376-cf87-43c0-9774-b85e0cadfe55"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.005211 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.005252 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dadbe376-cf87-43c0-9774-b85e0cadfe55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.068102 4852 scope.go:117] "RemoveContainer" containerID="bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819" Dec 01 20:23:26 crc kubenswrapper[4852]: E1201 20:23:26.071764 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819\": container with ID starting with bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819 not found: ID does not exist" containerID="bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.071830 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819"} err="failed to get container status \"bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819\": rpc error: code = NotFound desc = could not find container \"bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819\": container with ID starting with bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819 not found: ID does not exist" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.071869 4852 scope.go:117] "RemoveContainer" containerID="807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231" Dec 01 20:23:26 crc kubenswrapper[4852]: E1201 20:23:26.076501 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231\": container with ID starting with 807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231 not found: ID does not exist" containerID="807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.076573 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231"} err="failed to get container status \"807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231\": rpc error: code = NotFound desc = could not find container \"807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231\": container with ID starting with 807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231 not found: ID does not exist" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.076613 4852 scope.go:117] "RemoveContainer" containerID="bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.081421 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819"} err="failed to get container status 
\"bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819\": rpc error: code = NotFound desc = could not find container \"bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819\": container with ID starting with bb1bbcb56c490fbb7c40fd2d06aae04707e266cbe1e9f578e1398653ef988819 not found: ID does not exist" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.081473 4852 scope.go:117] "RemoveContainer" containerID="807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.082284 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231"} err="failed to get container status \"807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231\": rpc error: code = NotFound desc = could not find container \"807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231\": container with ID starting with 807877d98352392fb9606a9bdc4a97af46a926c63a5bff70a5c8bdcbaf244231 not found: ID does not exist" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.090350 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.104145 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.129580 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 01 20:23:26 crc kubenswrapper[4852]: E1201 20:23:26.130359 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dadbe376-cf87-43c0-9774-b85e0cadfe55" containerName="cinder-api" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.130427 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="dadbe376-cf87-43c0-9774-b85e0cadfe55" containerName="cinder-api" Dec 01 20:23:26 crc kubenswrapper[4852]: E1201 20:23:26.130510 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dadbe376-cf87-43c0-9774-b85e0cadfe55" containerName="cinder-api-log" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.130560 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="dadbe376-cf87-43c0-9774-b85e0cadfe55" containerName="cinder-api-log" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.130810 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="dadbe376-cf87-43c0-9774-b85e0cadfe55" containerName="cinder-api-log" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.130870 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="dadbe376-cf87-43c0-9774-b85e0cadfe55" containerName="cinder-api" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.131996 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.139623 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.139959 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.140989 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.153075 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.211046 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.211528 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-scripts\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.211566 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.211585 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.211614 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-config-data\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.211684 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f20e47a0-b3f2-48e5-baae-1e75e24377ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.211724 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.211746 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6zzm\" (UniqueName: 
\"kubernetes.io/projected/f20e47a0-b3f2-48e5-baae-1e75e24377ac-kube-api-access-k6zzm\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.211795 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f20e47a0-b3f2-48e5-baae-1e75e24377ac-logs\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.313042 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-scripts\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.313093 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.313114 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.313136 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-config-data\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.313197 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f20e47a0-b3f2-48e5-baae-1e75e24377ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.313234 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.313258 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6zzm\" (UniqueName: \"kubernetes.io/projected/f20e47a0-b3f2-48e5-baae-1e75e24377ac-kube-api-access-k6zzm\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.313311 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f20e47a0-b3f2-48e5-baae-1e75e24377ac-logs\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.313395 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.314033 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f20e47a0-b3f2-48e5-baae-1e75e24377ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.315252 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f20e47a0-b3f2-48e5-baae-1e75e24377ac-logs\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.320489 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-768f876787-6snnf" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.322035 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-scripts\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.323931 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.328133 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.328297 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.331571 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-config-data\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.335597 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6zzm\" (UniqueName: \"kubernetes.io/projected/f20e47a0-b3f2-48e5-baae-1e75e24377ac-kube-api-access-k6zzm\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.336088 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f20e47a0-b3f2-48e5-baae-1e75e24377ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f20e47a0-b3f2-48e5-baae-1e75e24377ac\") " pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 
20:23:26.355355 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dadbe376-cf87-43c0-9774-b85e0cadfe55" path="/var/lib/kubelet/pods/dadbe376-cf87-43c0-9774-b85e0cadfe55/volumes" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.416375 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdg8d\" (UniqueName: \"kubernetes.io/projected/c0d059ae-91d8-40a7-862b-7204dfc0b420-kube-api-access-cdg8d\") pod \"c0d059ae-91d8-40a7-862b-7204dfc0b420\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.416489 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-scripts\") pod \"c0d059ae-91d8-40a7-862b-7204dfc0b420\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.416586 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-config-data\") pod \"c0d059ae-91d8-40a7-862b-7204dfc0b420\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.416663 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0d059ae-91d8-40a7-862b-7204dfc0b420-logs\") pod \"c0d059ae-91d8-40a7-862b-7204dfc0b420\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.416806 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0d059ae-91d8-40a7-862b-7204dfc0b420-horizon-secret-key\") pod \"c0d059ae-91d8-40a7-862b-7204dfc0b420\" (UID: \"c0d059ae-91d8-40a7-862b-7204dfc0b420\") " Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.417570 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0d059ae-91d8-40a7-862b-7204dfc0b420-logs" (OuterVolumeSpecName: "logs") pod "c0d059ae-91d8-40a7-862b-7204dfc0b420" (UID: "c0d059ae-91d8-40a7-862b-7204dfc0b420"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.423277 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0d059ae-91d8-40a7-862b-7204dfc0b420-kube-api-access-cdg8d" (OuterVolumeSpecName: "kube-api-access-cdg8d") pod "c0d059ae-91d8-40a7-862b-7204dfc0b420" (UID: "c0d059ae-91d8-40a7-862b-7204dfc0b420"). InnerVolumeSpecName "kube-api-access-cdg8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.425568 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0d059ae-91d8-40a7-862b-7204dfc0b420-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c0d059ae-91d8-40a7-862b-7204dfc0b420" (UID: "c0d059ae-91d8-40a7-862b-7204dfc0b420"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.451524 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-scripts" (OuterVolumeSpecName: "scripts") pod "c0d059ae-91d8-40a7-862b-7204dfc0b420" (UID: "c0d059ae-91d8-40a7-862b-7204dfc0b420"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.462214 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.465747 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-config-data" (OuterVolumeSpecName: "config-data") pod "c0d059ae-91d8-40a7-862b-7204dfc0b420" (UID: "c0d059ae-91d8-40a7-862b-7204dfc0b420"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.521005 4852 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0d059ae-91d8-40a7-862b-7204dfc0b420-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.521067 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdg8d\" (UniqueName: \"kubernetes.io/projected/c0d059ae-91d8-40a7-862b-7204dfc0b420-kube-api-access-cdg8d\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.521080 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.521089 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0d059ae-91d8-40a7-862b-7204dfc0b420-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.521097 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0d059ae-91d8-40a7-862b-7204dfc0b420-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.550718 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.727374 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-scripts\") pod \"8cdd0646-f58e-47b0-b562-1db787dd489d\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.728028 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhzjd\" (UniqueName: \"kubernetes.io/projected/8cdd0646-f58e-47b0-b562-1db787dd489d-kube-api-access-xhzjd\") pod \"8cdd0646-f58e-47b0-b562-1db787dd489d\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.728203 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-config-data\") pod \"8cdd0646-f58e-47b0-b562-1db787dd489d\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.728392 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdd0646-f58e-47b0-b562-1db787dd489d-logs\") pod \"8cdd0646-f58e-47b0-b562-1db787dd489d\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.728434 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cdd0646-f58e-47b0-b562-1db787dd489d-horizon-secret-key\") pod \"8cdd0646-f58e-47b0-b562-1db787dd489d\" (UID: \"8cdd0646-f58e-47b0-b562-1db787dd489d\") " Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.729358 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cdd0646-f58e-47b0-b562-1db787dd489d-logs" (OuterVolumeSpecName: "logs") pod "8cdd0646-f58e-47b0-b562-1db787dd489d" (UID: "8cdd0646-f58e-47b0-b562-1db787dd489d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.735862 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cdd0646-f58e-47b0-b562-1db787dd489d-kube-api-access-xhzjd" (OuterVolumeSpecName: "kube-api-access-xhzjd") pod "8cdd0646-f58e-47b0-b562-1db787dd489d" (UID: "8cdd0646-f58e-47b0-b562-1db787dd489d"). InnerVolumeSpecName "kube-api-access-xhzjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.743119 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cdd0646-f58e-47b0-b562-1db787dd489d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "8cdd0646-f58e-47b0-b562-1db787dd489d" (UID: "8cdd0646-f58e-47b0-b562-1db787dd489d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.760309 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-config-data" (OuterVolumeSpecName: "config-data") pod "8cdd0646-f58e-47b0-b562-1db787dd489d" (UID: "8cdd0646-f58e-47b0-b562-1db787dd489d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.767121 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-scripts" (OuterVolumeSpecName: "scripts") pod "8cdd0646-f58e-47b0-b562-1db787dd489d" (UID: "8cdd0646-f58e-47b0-b562-1db787dd489d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.778333 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-768f876787-6snnf" event={"ID":"c0d059ae-91d8-40a7-862b-7204dfc0b420","Type":"ContainerDied","Data":"49ee3e596d6feeff5317311e37747ad295d4490e3ebe12bd74e6692911aeed44"} Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.778415 4852 scope.go:117] "RemoveContainer" containerID="5d2857c8b031fdea53b6b5866e39c037c604cccb04aa3788684953fa00c5dccf" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.778636 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-768f876787-6snnf" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.784176 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36183a17-928d-43a5-b1ac-7b8ebb8335c3","Type":"ContainerStarted","Data":"c4728a81a164da5503ec8edc3262f304958949f4fbc4a09f0a7deee8a3173ab1"} Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.787432 4852 generic.go:334] "Generic (PLEG): container finished" podID="8cdd0646-f58e-47b0-b562-1db787dd489d" containerID="61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff" exitCode=137 Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.788551 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6bd8d6ff69-cmc78" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.789762 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bd8d6ff69-cmc78" event={"ID":"8cdd0646-f58e-47b0-b562-1db787dd489d","Type":"ContainerDied","Data":"61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff"} Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.789860 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bd8d6ff69-cmc78" event={"ID":"8cdd0646-f58e-47b0-b562-1db787dd489d","Type":"ContainerDied","Data":"ecebd211af3bb5460de2097a3ff37cc213c3ca7c3f6a3a406320fc19c250d11f"} Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.830617 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.830664 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhzjd\" (UniqueName: \"kubernetes.io/projected/8cdd0646-f58e-47b0-b562-1db787dd489d-kube-api-access-xhzjd\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.830679 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cdd0646-f58e-47b0-b562-1db787dd489d-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.830688 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdd0646-f58e-47b0-b562-1db787dd489d-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.830697 4852 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cdd0646-f58e-47b0-b562-1db787dd489d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.906194 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-768f876787-6snnf"] Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.915219 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-768f876787-6snnf"] Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.923826 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6bd8d6ff69-cmc78"] Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.947609 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6bd8d6ff69-cmc78"] Dec 01 20:23:26 crc kubenswrapper[4852]: I1201 20:23:26.990734 4852 scope.go:117] "RemoveContainer" containerID="49c0bc829ec33a751d6ae77a82a9bf7cecdc0e6f8422bf140aa462f06e420073" Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.018784 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.026407 4852 scope.go:117] "RemoveContainer" containerID="61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff" Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.396611 4852 scope.go:117] "RemoveContainer" containerID="ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50" Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.430550 4852 scope.go:117] "RemoveContainer" containerID="61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff" Dec 01 20:23:27 crc kubenswrapper[4852]: E1201 
20:23:27.431211 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff\": container with ID starting with 61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff not found: ID does not exist" containerID="61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff" Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.431256 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff"} err="failed to get container status \"61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff\": rpc error: code = NotFound desc = could not find container \"61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff\": container with ID starting with 61ef720b1f83d089bd0a7d62d791e5422e54102d4208cb40889f7905a5ff4bff not found: ID does not exist" Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.431280 4852 scope.go:117] "RemoveContainer" containerID="ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50" Dec 01 20:23:27 crc kubenswrapper[4852]: E1201 20:23:27.431651 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50\": container with ID starting with ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50 not found: ID does not exist" containerID="ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50" Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.431676 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50"} err="failed to get container status \"ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50\": rpc error: code = NotFound desc = could not find container \"ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50\": container with ID starting with ae476d0f62804daaca866e55becdf8560c1bdfdd5c7e3416b7cf42fec57c3a50 not found: ID does not exist" Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.691560 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.809349 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36183a17-928d-43a5-b1ac-7b8ebb8335c3","Type":"ContainerStarted","Data":"282bc1d3cc6fa5e631b80a46763e5023be4811e55bb98cca75f29b0f33699bcd"} Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.812576 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f20e47a0-b3f2-48e5-baae-1e75e24377ac","Type":"ContainerStarted","Data":"596a450e894a6fe82378cd1be1324b499c42a664a57bc4e0ae053973805e8466"} Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.812648 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f20e47a0-b3f2-48e5-baae-1e75e24377ac","Type":"ContainerStarted","Data":"ef8ed115e9f7eaff3a0a2cd26b96ad5aea20362d5b0b736731b60cdfe05f39c1"} Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.997708 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-95bdd6c68-zqjgq" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" 
containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:34120->10.217.0.158:9311: read: connection reset by peer" Dec 01 20:23:27 crc kubenswrapper[4852]: I1201 20:23:27.997868 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-95bdd6c68-zqjgq" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:34110->10.217.0.158:9311: read: connection reset by peer" Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.223188 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-55686cd79f-5wjtq" Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.347914 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cdd0646-f58e-47b0-b562-1db787dd489d" path="/var/lib/kubelet/pods/8cdd0646-f58e-47b0-b562-1db787dd489d/volumes" Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.349103 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0d059ae-91d8-40a7-862b-7204dfc0b420" path="/var/lib/kubelet/pods/c0d059ae-91d8-40a7-862b-7204dfc0b420/volumes" Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.350035 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5cd4765d7b-hrwph"] Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.350378 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5cd4765d7b-hrwph" podUID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" containerName="neutron-api" containerID="cri-o://f2578a68a9ff23eee774dceb2db68c13807f48a0c27ecea4f39011272f7755f2" gracePeriod=30 Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.350481 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5cd4765d7b-hrwph" podUID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" containerName="neutron-httpd" containerID="cri-o://2a97efa173c06e0793777a5c5b4ab0b3077929e081edd4902bf2777a6f2553cf" gracePeriod=30 Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.828811 4852 generic.go:334] "Generic (PLEG): container finished" podID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerID="5f5d00484af4034237001a135aadcdfe440886eb64ce1e86eebf37c1c959f96c" exitCode=0 Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.828998 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-95bdd6c68-zqjgq" event={"ID":"aa9160bb-32cb-4617-8ecf-3ca078d2008a","Type":"ContainerDied","Data":"5f5d00484af4034237001a135aadcdfe440886eb64ce1e86eebf37c1c959f96c"} Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.866667 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f20e47a0-b3f2-48e5-baae-1e75e24377ac","Type":"ContainerStarted","Data":"b9e58af1ce7dbeca03668f0f16288c6694596e95001563b4706947a7a667c1d3"} Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.868144 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.876881 4852 generic.go:334] "Generic (PLEG): container finished" podID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" containerID="2a97efa173c06e0793777a5c5b4ab0b3077929e081edd4902bf2777a6f2553cf" exitCode=0 Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.876935 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-5cd4765d7b-hrwph" event={"ID":"f9f6adbd-d5d8-4057-84ad-fd835e718a8e","Type":"ContainerDied","Data":"2a97efa173c06e0793777a5c5b4ab0b3077929e081edd4902bf2777a6f2553cf"} Dec 01 20:23:28 crc kubenswrapper[4852]: I1201 20:23:28.909661 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.909637667 podStartE2EDuration="2.909637667s" podCreationTimestamp="2025-12-01 20:23:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:28.895947457 +0000 UTC m=+1128.823028874" watchObservedRunningTime="2025-12-01 20:23:28.909637667 +0000 UTC m=+1128.836719084" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.011193 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.086120 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dv8f\" (UniqueName: \"kubernetes.io/projected/aa9160bb-32cb-4617-8ecf-3ca078d2008a-kube-api-access-7dv8f\") pod \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.086177 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data-custom\") pod \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.086326 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa9160bb-32cb-4617-8ecf-3ca078d2008a-logs\") pod \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.086407 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-combined-ca-bundle\") pod \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.086537 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data\") pod \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\" (UID: \"aa9160bb-32cb-4617-8ecf-3ca078d2008a\") " Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.087662 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa9160bb-32cb-4617-8ecf-3ca078d2008a-logs" (OuterVolumeSpecName: "logs") pod "aa9160bb-32cb-4617-8ecf-3ca078d2008a" (UID: "aa9160bb-32cb-4617-8ecf-3ca078d2008a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.092689 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "aa9160bb-32cb-4617-8ecf-3ca078d2008a" (UID: "aa9160bb-32cb-4617-8ecf-3ca078d2008a"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.094166 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa9160bb-32cb-4617-8ecf-3ca078d2008a-kube-api-access-7dv8f" (OuterVolumeSpecName: "kube-api-access-7dv8f") pod "aa9160bb-32cb-4617-8ecf-3ca078d2008a" (UID: "aa9160bb-32cb-4617-8ecf-3ca078d2008a"). InnerVolumeSpecName "kube-api-access-7dv8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.148940 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa9160bb-32cb-4617-8ecf-3ca078d2008a" (UID: "aa9160bb-32cb-4617-8ecf-3ca078d2008a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.169254 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data" (OuterVolumeSpecName: "config-data") pod "aa9160bb-32cb-4617-8ecf-3ca078d2008a" (UID: "aa9160bb-32cb-4617-8ecf-3ca078d2008a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.188816 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dv8f\" (UniqueName: \"kubernetes.io/projected/aa9160bb-32cb-4617-8ecf-3ca078d2008a-kube-api-access-7dv8f\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.188877 4852 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.188894 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa9160bb-32cb-4617-8ecf-3ca078d2008a-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.188909 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.188920 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa9160bb-32cb-4617-8ecf-3ca078d2008a-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.500267 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.889159 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-95bdd6c68-zqjgq" event={"ID":"aa9160bb-32cb-4617-8ecf-3ca078d2008a","Type":"ContainerDied","Data":"e3ebebff086ec9031ad9dfdd1bc6fdfbbd16134cbfc584c019be829942365f0e"} Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.889244 4852 scope.go:117] "RemoveContainer" containerID="5f5d00484af4034237001a135aadcdfe440886eb64ce1e86eebf37c1c959f96c" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.889299 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-95bdd6c68-zqjgq" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.893730 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36183a17-928d-43a5-b1ac-7b8ebb8335c3","Type":"ContainerStarted","Data":"65315aa2b20e6cc2604c6107843fae180850ec8e03b92b7a6860b9bfbc3ae95f"} Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.929225 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-95bdd6c68-zqjgq"] Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.929239 4852 scope.go:117] "RemoveContainer" containerID="46abf8cba3f5f8ca1faad6a8c48325ab13918dc745dea9651dfcb773194b1662" Dec 01 20:23:29 crc kubenswrapper[4852]: I1201 20:23:29.941564 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-95bdd6c68-zqjgq"] Dec 01 20:23:30 crc kubenswrapper[4852]: I1201 20:23:30.354988 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" path="/var/lib/kubelet/pods/aa9160bb-32cb-4617-8ecf-3ca078d2008a/volumes" Dec 01 20:23:30 crc kubenswrapper[4852]: I1201 20:23:30.457736 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 01 20:23:30 crc kubenswrapper[4852]: I1201 20:23:30.723804 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 01 20:23:30 crc kubenswrapper[4852]: I1201 20:23:30.906774 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.032185 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf96b7dc5-2q6zp"] Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.033002 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" podUID="16b547de-9574-4465-94e3-054b5013ece5" containerName="dnsmasq-dns" containerID="cri-o://7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487" gracePeriod=10 Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.049602 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.551240 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.669851 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-nb\") pod \"16b547de-9574-4465-94e3-054b5013ece5\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.670094 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-sb\") pod \"16b547de-9574-4465-94e3-054b5013ece5\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.670140 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jssd\" (UniqueName: \"kubernetes.io/projected/16b547de-9574-4465-94e3-054b5013ece5-kube-api-access-9jssd\") pod \"16b547de-9574-4465-94e3-054b5013ece5\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.670259 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-config\") pod \"16b547de-9574-4465-94e3-054b5013ece5\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.670286 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-swift-storage-0\") pod \"16b547de-9574-4465-94e3-054b5013ece5\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.670371 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-svc\") pod \"16b547de-9574-4465-94e3-054b5013ece5\" (UID: \"16b547de-9574-4465-94e3-054b5013ece5\") " Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.676357 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16b547de-9574-4465-94e3-054b5013ece5-kube-api-access-9jssd" (OuterVolumeSpecName: "kube-api-access-9jssd") pod "16b547de-9574-4465-94e3-054b5013ece5" (UID: "16b547de-9574-4465-94e3-054b5013ece5"). InnerVolumeSpecName "kube-api-access-9jssd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.762798 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "16b547de-9574-4465-94e3-054b5013ece5" (UID: "16b547de-9574-4465-94e3-054b5013ece5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.764858 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "16b547de-9574-4465-94e3-054b5013ece5" (UID: "16b547de-9574-4465-94e3-054b5013ece5"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.765090 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "16b547de-9574-4465-94e3-054b5013ece5" (UID: "16b547de-9574-4465-94e3-054b5013ece5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.767343 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "16b547de-9574-4465-94e3-054b5013ece5" (UID: "16b547de-9574-4465-94e3-054b5013ece5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.772928 4852 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.772964 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.772992 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.773004 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.773014 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jssd\" (UniqueName: \"kubernetes.io/projected/16b547de-9574-4465-94e3-054b5013ece5-kube-api-access-9jssd\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.788548 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-config" (OuterVolumeSpecName: "config") pod "16b547de-9574-4465-94e3-054b5013ece5" (UID: "16b547de-9574-4465-94e3-054b5013ece5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.875498 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16b547de-9574-4465-94e3-054b5013ece5-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.924114 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36183a17-928d-43a5-b1ac-7b8ebb8335c3","Type":"ContainerStarted","Data":"78bddec1e1a48a38ef379e8aab54ef42a518d40dfc77962e0c00c5cbc9b8d065"} Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.927378 4852 generic.go:334] "Generic (PLEG): container finished" podID="16b547de-9574-4465-94e3-054b5013ece5" containerID="7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487" exitCode=0 Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.927524 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" event={"ID":"16b547de-9574-4465-94e3-054b5013ece5","Type":"ContainerDied","Data":"7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487"} Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.927612 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" event={"ID":"16b547de-9574-4465-94e3-054b5013ece5","Type":"ContainerDied","Data":"2f81d434f483e4dfe44b02c279859ef46be9aac301e388961f512fe17c259bd5"} Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.927661 4852 scope.go:117] "RemoveContainer" containerID="7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.927718 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cf96b7dc5-2q6zp" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.930376 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="0721155d-0b83-4767-8d74-492384444e68" containerName="cinder-scheduler" containerID="cri-o://086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461" gracePeriod=30 Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.930443 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="0721155d-0b83-4767-8d74-492384444e68" containerName="probe" containerID="cri-o://5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7" gracePeriod=30 Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.955639 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.133360785 podStartE2EDuration="8.955611117s" podCreationTimestamp="2025-12-01 20:23:23 +0000 UTC" firstStartedPulling="2025-12-01 20:23:24.821328377 +0000 UTC m=+1124.748409814" lastFinishedPulling="2025-12-01 20:23:30.643578729 +0000 UTC m=+1130.570660146" observedRunningTime="2025-12-01 20:23:31.948563315 +0000 UTC m=+1131.875644742" watchObservedRunningTime="2025-12-01 20:23:31.955611117 +0000 UTC m=+1131.882692534" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.964187 4852 scope.go:117] "RemoveContainer" containerID="acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182" Dec 01 20:23:31 crc kubenswrapper[4852]: I1201 20:23:31.994691 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf96b7dc5-2q6zp"] Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.002741 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cf96b7dc5-2q6zp"] Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.004496 4852 scope.go:117] "RemoveContainer" containerID="7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487" Dec 01 20:23:32 crc kubenswrapper[4852]: E1201 20:23:32.005313 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487\": container with ID starting with 7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487 not found: ID does not exist" containerID="7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487" Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.005379 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487"} err="failed to get container status \"7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487\": rpc error: code = NotFound desc = could not find container \"7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487\": container with ID starting with 7130258a3ad4baee2c78613f7139b69d9f41daaa0034e21b60be6ed19c725487 not found: ID does not exist" Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.005420 4852 scope.go:117] "RemoveContainer" containerID="acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182" Dec 01 20:23:32 crc kubenswrapper[4852]: E1201 20:23:32.006080 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182\": container with ID starting with acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182 not found: ID does not exist" containerID="acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182" Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.006185 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182"} err="failed to get container status \"acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182\": rpc error: code = NotFound desc = could not find container \"acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182\": container with ID starting with acbab570f406a9984d94837ae48be2b82339a68862b81a0f6b5eb5b1ed4c8182 not found: ID does not exist" Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.332599 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16b547de-9574-4465-94e3-054b5013ece5" path="/var/lib/kubelet/pods/16b547de-9574-4465-94e3-054b5013ece5/volumes" Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.954554 4852 generic.go:334] "Generic (PLEG): container finished" podID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" containerID="f2578a68a9ff23eee774dceb2db68c13807f48a0c27ecea4f39011272f7755f2" exitCode=0 Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.954629 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd4765d7b-hrwph" event={"ID":"f9f6adbd-d5d8-4057-84ad-fd835e718a8e","Type":"ContainerDied","Data":"f2578a68a9ff23eee774dceb2db68c13807f48a0c27ecea4f39011272f7755f2"} Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.960308 4852 generic.go:334] "Generic (PLEG): container finished" podID="0721155d-0b83-4767-8d74-492384444e68" containerID="5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7" exitCode=0 Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.960856 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0721155d-0b83-4767-8d74-492384444e68","Type":"ContainerDied","Data":"5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7"} Dec 01 20:23:32 crc kubenswrapper[4852]: I1201 20:23:32.960941 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.412099 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.512708 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trl4r\" (UniqueName: \"kubernetes.io/projected/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-kube-api-access-trl4r\") pod \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.512929 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-ovndb-tls-certs\") pod \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.512972 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-httpd-config\") pod \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.513066 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-config\") pod \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.513182 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-combined-ca-bundle\") pod \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\" (UID: \"f9f6adbd-d5d8-4057-84ad-fd835e718a8e\") " Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.520280 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-kube-api-access-trl4r" (OuterVolumeSpecName: "kube-api-access-trl4r") pod "f9f6adbd-d5d8-4057-84ad-fd835e718a8e" (UID: "f9f6adbd-d5d8-4057-84ad-fd835e718a8e"). InnerVolumeSpecName "kube-api-access-trl4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.520908 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "f9f6adbd-d5d8-4057-84ad-fd835e718a8e" (UID: "f9f6adbd-d5d8-4057-84ad-fd835e718a8e"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.573079 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-config" (OuterVolumeSpecName: "config") pod "f9f6adbd-d5d8-4057-84ad-fd835e718a8e" (UID: "f9f6adbd-d5d8-4057-84ad-fd835e718a8e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.593479 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9f6adbd-d5d8-4057-84ad-fd835e718a8e" (UID: "f9f6adbd-d5d8-4057-84ad-fd835e718a8e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.615891 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trl4r\" (UniqueName: \"kubernetes.io/projected/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-kube-api-access-trl4r\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.616135 4852 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.616282 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.616341 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.624747 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "f9f6adbd-d5d8-4057-84ad-fd835e718a8e" (UID: "f9f6adbd-d5d8-4057-84ad-fd835e718a8e"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.717957 4852 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9f6adbd-d5d8-4057-84ad-fd835e718a8e-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.987913 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cd4765d7b-hrwph" event={"ID":"f9f6adbd-d5d8-4057-84ad-fd835e718a8e","Type":"ContainerDied","Data":"23dcbb4b9feffeadc51bdd80c9fab04af1037ec426b56d8cd703cebafc1dae0f"} Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.990426 4852 scope.go:117] "RemoveContainer" containerID="2a97efa173c06e0793777a5c5b4ab0b3077929e081edd4902bf2777a6f2553cf" Dec 01 20:23:33 crc kubenswrapper[4852]: I1201 20:23:33.987984 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5cd4765d7b-hrwph" Dec 01 20:23:34 crc kubenswrapper[4852]: I1201 20:23:34.038114 4852 scope.go:117] "RemoveContainer" containerID="f2578a68a9ff23eee774dceb2db68c13807f48a0c27ecea4f39011272f7755f2" Dec 01 20:23:34 crc kubenswrapper[4852]: I1201 20:23:34.055182 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5cd4765d7b-hrwph"] Dec 01 20:23:34 crc kubenswrapper[4852]: I1201 20:23:34.074221 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5cd4765d7b-hrwph"] Dec 01 20:23:34 crc kubenswrapper[4852]: I1201 20:23:34.338964 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" path="/var/lib/kubelet/pods/f9f6adbd-d5d8-4057-84ad-fd835e718a8e/volumes" Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.256444 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.828694 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.913663 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data\") pod \"0721155d-0b83-4767-8d74-492384444e68\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.913730 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0721155d-0b83-4767-8d74-492384444e68-etc-machine-id\") pod \"0721155d-0b83-4767-8d74-492384444e68\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.913841 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-combined-ca-bundle\") pod \"0721155d-0b83-4767-8d74-492384444e68\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.913960 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88mcl\" (UniqueName: \"kubernetes.io/projected/0721155d-0b83-4767-8d74-492384444e68-kube-api-access-88mcl\") pod \"0721155d-0b83-4767-8d74-492384444e68\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.913983 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data-custom\") pod \"0721155d-0b83-4767-8d74-492384444e68\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.914085 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-scripts\") pod \"0721155d-0b83-4767-8d74-492384444e68\" (UID: \"0721155d-0b83-4767-8d74-492384444e68\") " Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.915291 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0721155d-0b83-4767-8d74-492384444e68-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0721155d-0b83-4767-8d74-492384444e68" (UID: "0721155d-0b83-4767-8d74-492384444e68"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.924392 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0721155d-0b83-4767-8d74-492384444e68-kube-api-access-88mcl" (OuterVolumeSpecName: "kube-api-access-88mcl") pod "0721155d-0b83-4767-8d74-492384444e68" (UID: "0721155d-0b83-4767-8d74-492384444e68"). InnerVolumeSpecName "kube-api-access-88mcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.924608 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-scripts" (OuterVolumeSpecName: "scripts") pod "0721155d-0b83-4767-8d74-492384444e68" (UID: "0721155d-0b83-4767-8d74-492384444e68"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.929122 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0721155d-0b83-4767-8d74-492384444e68" (UID: "0721155d-0b83-4767-8d74-492384444e68"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:36 crc kubenswrapper[4852]: I1201 20:23:36.993717 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0721155d-0b83-4767-8d74-492384444e68" (UID: "0721155d-0b83-4767-8d74-492384444e68"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.016227 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88mcl\" (UniqueName: \"kubernetes.io/projected/0721155d-0b83-4767-8d74-492384444e68-kube-api-access-88mcl\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.016590 4852 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.016650 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.016744 4852 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0721155d-0b83-4767-8d74-492384444e68-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.016802 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.031843 4852 generic.go:334] "Generic (PLEG): container finished" podID="0721155d-0b83-4767-8d74-492384444e68" containerID="086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461" exitCode=0 Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.032432 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0721155d-0b83-4767-8d74-492384444e68","Type":"ContainerDied","Data":"086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461"} Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.032592 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0721155d-0b83-4767-8d74-492384444e68","Type":"ContainerDied","Data":"ded8999d4ccd9083427943a4aec5dd5d37378e5732fc59a29fbe8bbd20da27b4"} Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.032677 4852 scope.go:117] "RemoveContainer" containerID="5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.032924 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.049606 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data" (OuterVolumeSpecName: "config-data") pod "0721155d-0b83-4767-8d74-492384444e68" (UID: "0721155d-0b83-4767-8d74-492384444e68"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.082553 4852 scope.go:117] "RemoveContainer" containerID="086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.118930 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0721155d-0b83-4767-8d74-492384444e68-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.133998 4852 scope.go:117] "RemoveContainer" containerID="5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.134629 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7\": container with ID starting with 5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7 not found: ID does not exist" containerID="5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.134693 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7"} err="failed to get container status \"5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7\": rpc error: code = NotFound desc = could not find container \"5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7\": container with ID starting with 5a8971f54130ca02b06f03f8829a94c34a93d7e55efb31735c6cee46c3193aa7 not found: ID does not exist" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.134729 4852 scope.go:117] "RemoveContainer" containerID="086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.135208 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461\": container with ID starting with 086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461 not found: ID does not exist" containerID="086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.135232 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461"} err="failed to get container status \"086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461\": rpc error: code = NotFound desc = could not find container \"086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461\": container with ID starting with 086be80c06085ba59117d875cee506eef68ed9d55b9a087c26d26d433cdbf461 not found: ID does not exist" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.392124 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] 
Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.407774 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.434673 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435104 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cdd0646-f58e-47b0-b562-1db787dd489d" containerName="horizon" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435123 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cdd0646-f58e-47b0-b562-1db787dd489d" containerName="horizon" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435145 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0721155d-0b83-4767-8d74-492384444e68" containerName="cinder-scheduler" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435155 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0721155d-0b83-4767-8d74-492384444e68" containerName="cinder-scheduler" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435170 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cdd0646-f58e-47b0-b562-1db787dd489d" containerName="horizon-log" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435178 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cdd0646-f58e-47b0-b562-1db787dd489d" containerName="horizon-log" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435193 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16b547de-9574-4465-94e3-054b5013ece5" containerName="init" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435199 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="16b547de-9574-4465-94e3-054b5013ece5" containerName="init" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435212 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerName="barbican-api-log" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435218 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerName="barbican-api-log" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435239 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" containerName="neutron-httpd" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435245 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" containerName="neutron-httpd" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435256 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" containerName="neutron-api" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435262 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" containerName="neutron-api" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435274 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0d059ae-91d8-40a7-862b-7204dfc0b420" containerName="horizon" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435280 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0d059ae-91d8-40a7-862b-7204dfc0b420" containerName="horizon" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435290 4852 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="16b547de-9574-4465-94e3-054b5013ece5" containerName="dnsmasq-dns" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435295 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="16b547de-9574-4465-94e3-054b5013ece5" containerName="dnsmasq-dns" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435305 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0721155d-0b83-4767-8d74-492384444e68" containerName="probe" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435313 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0721155d-0b83-4767-8d74-492384444e68" containerName="probe" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435325 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerName="barbican-api" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435333 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerName="barbican-api" Dec 01 20:23:37 crc kubenswrapper[4852]: E1201 20:23:37.435343 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0d059ae-91d8-40a7-862b-7204dfc0b420" containerName="horizon-log" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435348 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0d059ae-91d8-40a7-862b-7204dfc0b420" containerName="horizon-log" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435556 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0721155d-0b83-4767-8d74-492384444e68" containerName="cinder-scheduler" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435572 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0d059ae-91d8-40a7-862b-7204dfc0b420" containerName="horizon" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435584 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cdd0646-f58e-47b0-b562-1db787dd489d" containerName="horizon-log" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435592 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cdd0646-f58e-47b0-b562-1db787dd489d" containerName="horizon" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435603 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerName="barbican-api-log" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435618 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" containerName="neutron-httpd" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435626 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0d059ae-91d8-40a7-862b-7204dfc0b420" containerName="horizon-log" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435638 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9f6adbd-d5d8-4057-84ad-fd835e718a8e" containerName="neutron-api" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435647 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="16b547de-9574-4465-94e3-054b5013ece5" containerName="dnsmasq-dns" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435653 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa9160bb-32cb-4617-8ecf-3ca078d2008a" containerName="barbican-api" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.435663 4852 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0721155d-0b83-4767-8d74-492384444e68" containerName="probe" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.440060 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.443758 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.454582 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.527272 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.527347 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdl69\" (UniqueName: \"kubernetes.io/projected/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-kube-api-access-tdl69\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.527522 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-scripts\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.527593 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.527617 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-config-data\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.527881 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.629927 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-scripts\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.630016 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: 
\"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.630046 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-config-data\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.630105 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.630151 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.630202 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdl69\" (UniqueName: \"kubernetes.io/projected/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-kube-api-access-tdl69\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.632431 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.637389 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-scripts\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.638301 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-config-data\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.652468 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.653433 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.655889 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdl69\" (UniqueName: 
\"kubernetes.io/projected/9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a-kube-api-access-tdl69\") pod \"cinder-scheduler-0\" (UID: \"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a\") " pod="openstack/cinder-scheduler-0" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.709080 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-fd7b4cb9d-8zvhn" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.726076 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-fd7b4cb9d-8zvhn" Dec 01 20:23:37 crc kubenswrapper[4852]: I1201 20:23:37.775127 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 20:23:38 crc kubenswrapper[4852]: I1201 20:23:38.331230 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0721155d-0b83-4767-8d74-492384444e68" path="/var/lib/kubelet/pods/0721155d-0b83-4767-8d74-492384444e68/volumes" Dec 01 20:23:38 crc kubenswrapper[4852]: I1201 20:23:38.335550 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 20:23:38 crc kubenswrapper[4852]: I1201 20:23:38.398426 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-dfd6f888-xxwbg" Dec 01 20:23:38 crc kubenswrapper[4852]: I1201 20:23:38.399169 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-574c7f8dfc-6k2xn" Dec 01 20:23:38 crc kubenswrapper[4852]: I1201 20:23:38.539849 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-d7844c8bb-bfdj8"] Dec 01 20:23:38 crc kubenswrapper[4852]: I1201 20:23:38.541604 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-d7844c8bb-bfdj8" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon" containerID="cri-o://65ae73baf982279b1029ae39d9022724da41a50922efb382d9bb761c1c4b1753" gracePeriod=30 Dec 01 20:23:38 crc kubenswrapper[4852]: I1201 20:23:38.541785 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-d7844c8bb-bfdj8" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon-log" containerID="cri-o://93647e0225eee6336edcf0baca3971246c9732cc76340cbfb6ab71e6e1227f4e" gracePeriod=30 Dec 01 20:23:38 crc kubenswrapper[4852]: I1201 20:23:38.715786 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 01 20:23:39 crc kubenswrapper[4852]: I1201 20:23:39.073024 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a","Type":"ContainerStarted","Data":"05b3811f08a00f3899f051c4f278e14a5e10de63920863fb0618b9449c73c3ef"} Dec 01 20:23:40 crc kubenswrapper[4852]: I1201 20:23:40.085278 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a","Type":"ContainerStarted","Data":"58231e856205ea1c94a611bf546c1239fd9ca1d7afbecaa4aaf13579b3c5340d"} Dec 01 20:23:40 crc kubenswrapper[4852]: I1201 20:23:40.085700 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a","Type":"ContainerStarted","Data":"4f71ea53b35d6e2dc3bfa0960b2dd7aedb7ad06a10947a528eca4cf1121f7ee5"} Dec 01 20:23:40 crc kubenswrapper[4852]: I1201 20:23:40.113777 4852 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.113753387 podStartE2EDuration="3.113753387s" podCreationTimestamp="2025-12-01 20:23:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:40.10714057 +0000 UTC m=+1140.034221987" watchObservedRunningTime="2025-12-01 20:23:40.113753387 +0000 UTC m=+1140.040834814" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.315047 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.317014 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.345348 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.347968 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.349134 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.349440 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-b5g74" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.481173 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49382464-d20a-4ec7-9096-5679b0fc12b7-combined-ca-bundle\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.481279 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/49382464-d20a-4ec7-9096-5679b0fc12b7-openstack-config\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.481691 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/49382464-d20a-4ec7-9096-5679b0fc12b7-openstack-config-secret\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.481805 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkqp8\" (UniqueName: \"kubernetes.io/projected/49382464-d20a-4ec7-9096-5679b0fc12b7-kube-api-access-dkqp8\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.583917 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/49382464-d20a-4ec7-9096-5679b0fc12b7-openstack-config-secret\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.583981 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkqp8\" 
(UniqueName: \"kubernetes.io/projected/49382464-d20a-4ec7-9096-5679b0fc12b7-kube-api-access-dkqp8\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.584061 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49382464-d20a-4ec7-9096-5679b0fc12b7-combined-ca-bundle\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.584104 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/49382464-d20a-4ec7-9096-5679b0fc12b7-openstack-config\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.585156 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/49382464-d20a-4ec7-9096-5679b0fc12b7-openstack-config\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.594566 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/49382464-d20a-4ec7-9096-5679b0fc12b7-openstack-config-secret\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.595119 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49382464-d20a-4ec7-9096-5679b0fc12b7-combined-ca-bundle\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.608536 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkqp8\" (UniqueName: \"kubernetes.io/projected/49382464-d20a-4ec7-9096-5679b0fc12b7-kube-api-access-dkqp8\") pod \"openstackclient\" (UID: \"49382464-d20a-4ec7-9096-5679b0fc12b7\") " pod="openstack/openstackclient" Dec 01 20:23:41 crc kubenswrapper[4852]: I1201 20:23:41.656673 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 01 20:23:42 crc kubenswrapper[4852]: I1201 20:23:42.110559 4852 generic.go:334] "Generic (PLEG): container finished" podID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerID="65ae73baf982279b1029ae39d9022724da41a50922efb382d9bb761c1c4b1753" exitCode=0 Dec 01 20:23:42 crc kubenswrapper[4852]: I1201 20:23:42.110643 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d7844c8bb-bfdj8" event={"ID":"734ce01c-357e-438b-bfe6-39fa2044dc13","Type":"ContainerDied","Data":"65ae73baf982279b1029ae39d9022724da41a50922efb382d9bb761c1c4b1753"} Dec 01 20:23:42 crc kubenswrapper[4852]: I1201 20:23:42.141404 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 01 20:23:42 crc kubenswrapper[4852]: W1201 20:23:42.145777 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49382464_d20a_4ec7_9096_5679b0fc12b7.slice/crio-44b2693ae31f4d9eed8442e76cc1e285d7d6701462f7e548409decc7bf7e0556 WatchSource:0}: Error finding container 44b2693ae31f4d9eed8442e76cc1e285d7d6701462f7e548409decc7bf7e0556: Status 404 returned error can't find the container with id 44b2693ae31f4d9eed8442e76cc1e285d7d6701462f7e548409decc7bf7e0556 Dec 01 20:23:42 crc kubenswrapper[4852]: I1201 20:23:42.775806 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 01 20:23:43 crc kubenswrapper[4852]: I1201 20:23:43.137159 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"49382464-d20a-4ec7-9096-5679b0fc12b7","Type":"ContainerStarted","Data":"44b2693ae31f4d9eed8442e76cc1e285d7d6701462f7e548409decc7bf7e0556"} Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.242650 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-d7844c8bb-bfdj8" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.760333 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-cd9d56787-qlkbk"] Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.768217 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.773235 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.773553 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.773688 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.782535 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-cd9d56787-qlkbk"] Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.862306 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-public-tls-certs\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.862426 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-internal-tls-certs\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.862521 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2s4x\" (UniqueName: \"kubernetes.io/projected/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-kube-api-access-b2s4x\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.862562 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-config-data\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.862584 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-etc-swift\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.862609 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-log-httpd\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.862732 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-combined-ca-bundle\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " 
pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.862831 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-run-httpd\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.965033 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-combined-ca-bundle\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.965460 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-run-httpd\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.965554 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-public-tls-certs\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.965614 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-internal-tls-certs\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.965646 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2s4x\" (UniqueName: \"kubernetes.io/projected/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-kube-api-access-b2s4x\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.965682 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-config-data\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.965747 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-etc-swift\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.965778 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-log-httpd\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc 
kubenswrapper[4852]: I1201 20:23:44.968192 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-log-httpd\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.968840 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-run-httpd\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.974363 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-etc-swift\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.975068 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-internal-tls-certs\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.975191 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-config-data\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.975751 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-combined-ca-bundle\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.984166 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2s4x\" (UniqueName: \"kubernetes.io/projected/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-kube-api-access-b2s4x\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:44 crc kubenswrapper[4852]: I1201 20:23:44.985276 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c-public-tls-certs\") pod \"swift-proxy-cd9d56787-qlkbk\" (UID: \"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c\") " pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:45 crc kubenswrapper[4852]: I1201 20:23:45.114172 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:45 crc kubenswrapper[4852]: I1201 20:23:45.790089 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-cd9d56787-qlkbk"] Dec 01 20:23:46 crc kubenswrapper[4852]: I1201 20:23:46.218101 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-cd9d56787-qlkbk" event={"ID":"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c","Type":"ContainerStarted","Data":"f97c5fc953853cbf741a149c596a07bc3c7dcd4267c4f0cf76a8217cba6df5e2"} Dec 01 20:23:46 crc kubenswrapper[4852]: I1201 20:23:46.218157 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-cd9d56787-qlkbk" event={"ID":"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c","Type":"ContainerStarted","Data":"06fc30211520285561776f99f611ada548587dbf221b30e3a4976bc92075c51d"} Dec 01 20:23:46 crc kubenswrapper[4852]: I1201 20:23:46.388170 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:46 crc kubenswrapper[4852]: I1201 20:23:46.390852 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="ceilometer-central-agent" containerID="cri-o://c4728a81a164da5503ec8edc3262f304958949f4fbc4a09f0a7deee8a3173ab1" gracePeriod=30 Dec 01 20:23:46 crc kubenswrapper[4852]: I1201 20:23:46.391153 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="proxy-httpd" containerID="cri-o://78bddec1e1a48a38ef379e8aab54ef42a518d40dfc77962e0c00c5cbc9b8d065" gracePeriod=30 Dec 01 20:23:46 crc kubenswrapper[4852]: I1201 20:23:46.391634 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="ceilometer-notification-agent" containerID="cri-o://282bc1d3cc6fa5e631b80a46763e5023be4811e55bb98cca75f29b0f33699bcd" gracePeriod=30 Dec 01 20:23:46 crc kubenswrapper[4852]: I1201 20:23:46.391663 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="sg-core" containerID="cri-o://65315aa2b20e6cc2604c6107843fae180850ec8e03b92b7a6860b9bfbc3ae95f" gracePeriod=30 Dec 01 20:23:46 crc kubenswrapper[4852]: I1201 20:23:46.399597 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 01 20:23:47 crc kubenswrapper[4852]: I1201 20:23:47.233946 4852 generic.go:334] "Generic (PLEG): container finished" podID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerID="78bddec1e1a48a38ef379e8aab54ef42a518d40dfc77962e0c00c5cbc9b8d065" exitCode=0 Dec 01 20:23:47 crc kubenswrapper[4852]: I1201 20:23:47.234316 4852 generic.go:334] "Generic (PLEG): container finished" podID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerID="65315aa2b20e6cc2604c6107843fae180850ec8e03b92b7a6860b9bfbc3ae95f" exitCode=2 Dec 01 20:23:47 crc kubenswrapper[4852]: I1201 20:23:47.234326 4852 generic.go:334] "Generic (PLEG): container finished" podID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerID="c4728a81a164da5503ec8edc3262f304958949f4fbc4a09f0a7deee8a3173ab1" exitCode=0 Dec 01 20:23:47 crc kubenswrapper[4852]: I1201 20:23:47.234021 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"36183a17-928d-43a5-b1ac-7b8ebb8335c3","Type":"ContainerDied","Data":"78bddec1e1a48a38ef379e8aab54ef42a518d40dfc77962e0c00c5cbc9b8d065"} Dec 01 20:23:47 crc kubenswrapper[4852]: I1201 20:23:47.234397 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36183a17-928d-43a5-b1ac-7b8ebb8335c3","Type":"ContainerDied","Data":"65315aa2b20e6cc2604c6107843fae180850ec8e03b92b7a6860b9bfbc3ae95f"} Dec 01 20:23:47 crc kubenswrapper[4852]: I1201 20:23:47.234414 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36183a17-928d-43a5-b1ac-7b8ebb8335c3","Type":"ContainerDied","Data":"c4728a81a164da5503ec8edc3262f304958949f4fbc4a09f0a7deee8a3173ab1"} Dec 01 20:23:47 crc kubenswrapper[4852]: I1201 20:23:47.237667 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-cd9d56787-qlkbk" event={"ID":"aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c","Type":"ContainerStarted","Data":"db224cfb833722c968831974de0fce4db921fa6aa7ba828ba16d7ff3cbd7988b"} Dec 01 20:23:47 crc kubenswrapper[4852]: I1201 20:23:47.238438 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:47 crc kubenswrapper[4852]: I1201 20:23:47.238652 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:47 crc kubenswrapper[4852]: I1201 20:23:47.268864 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-cd9d56787-qlkbk" podStartSLOduration=3.268839382 podStartE2EDuration="3.268839382s" podCreationTimestamp="2025-12-01 20:23:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:47.260755778 +0000 UTC m=+1147.187837215" watchObservedRunningTime="2025-12-01 20:23:47.268839382 +0000 UTC m=+1147.195920799" Dec 01 20:23:48 crc kubenswrapper[4852]: I1201 20:23:48.208094 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 01 20:23:50 crc kubenswrapper[4852]: I1201 20:23:50.230421 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:23:50 crc kubenswrapper[4852]: I1201 20:23:50.231307 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:23:50 crc kubenswrapper[4852]: I1201 20:23:50.231371 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:23:50 crc kubenswrapper[4852]: I1201 20:23:50.232316 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4f3ce873c2afbf23e359eb217337f90c2f601b26a34a306541975911addf4c32"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:23:50 
crc kubenswrapper[4852]: I1201 20:23:50.232391 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://4f3ce873c2afbf23e359eb217337f90c2f601b26a34a306541975911addf4c32" gracePeriod=600 Dec 01 20:23:50 crc kubenswrapper[4852]: I1201 20:23:50.278547 4852 generic.go:334] "Generic (PLEG): container finished" podID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerID="282bc1d3cc6fa5e631b80a46763e5023be4811e55bb98cca75f29b0f33699bcd" exitCode=0 Dec 01 20:23:50 crc kubenswrapper[4852]: I1201 20:23:50.278605 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36183a17-928d-43a5-b1ac-7b8ebb8335c3","Type":"ContainerDied","Data":"282bc1d3cc6fa5e631b80a46763e5023be4811e55bb98cca75f29b0f33699bcd"} Dec 01 20:23:50 crc kubenswrapper[4852]: I1201 20:23:50.931405 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 20:23:50 crc kubenswrapper[4852]: I1201 20:23:50.931662 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="b634d10f-beff-4ef8-8602-8e4acb8a5a4b" containerName="kube-state-metrics" containerID="cri-o://1a3020dd02aaa6a46b248b982f5497dd2bb5178a04e3e26688b42ba73051ade6" gracePeriod=30 Dec 01 20:23:51 crc kubenswrapper[4852]: I1201 20:23:51.293033 4852 generic.go:334] "Generic (PLEG): container finished" podID="b634d10f-beff-4ef8-8602-8e4acb8a5a4b" containerID="1a3020dd02aaa6a46b248b982f5497dd2bb5178a04e3e26688b42ba73051ade6" exitCode=2 Dec 01 20:23:51 crc kubenswrapper[4852]: I1201 20:23:51.293232 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b634d10f-beff-4ef8-8602-8e4acb8a5a4b","Type":"ContainerDied","Data":"1a3020dd02aaa6a46b248b982f5497dd2bb5178a04e3e26688b42ba73051ade6"} Dec 01 20:23:51 crc kubenswrapper[4852]: I1201 20:23:51.298593 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="4f3ce873c2afbf23e359eb217337f90c2f601b26a34a306541975911addf4c32" exitCode=0 Dec 01 20:23:51 crc kubenswrapper[4852]: I1201 20:23:51.298673 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"4f3ce873c2afbf23e359eb217337f90c2f601b26a34a306541975911addf4c32"} Dec 01 20:23:51 crc kubenswrapper[4852]: I1201 20:23:51.298746 4852 scope.go:117] "RemoveContainer" containerID="6f73ea9db4bc154e679740cf30c147a3e0cc18bfea0a3cba718640a8472b3f3e" Dec 01 20:23:52 crc kubenswrapper[4852]: I1201 20:23:52.050666 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="b634d10f-beff-4ef8-8602-8e4acb8a5a4b" containerName="kube-state-metrics" probeResult="failure" output="Get \"http://10.217.0.103:8081/readyz\": dial tcp 10.217.0.103:8081: connect: connection refused" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.091684 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.153947 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.162182 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz2zz\" (UniqueName: \"kubernetes.io/projected/b634d10f-beff-4ef8-8602-8e4acb8a5a4b-kube-api-access-qz2zz\") pod \"b634d10f-beff-4ef8-8602-8e4acb8a5a4b\" (UID: \"b634d10f-beff-4ef8-8602-8e4acb8a5a4b\") " Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.169308 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b634d10f-beff-4ef8-8602-8e4acb8a5a4b-kube-api-access-qz2zz" (OuterVolumeSpecName: "kube-api-access-qz2zz") pod "b634d10f-beff-4ef8-8602-8e4acb8a5a4b" (UID: "b634d10f-beff-4ef8-8602-8e4acb8a5a4b"). InnerVolumeSpecName "kube-api-access-qz2zz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.263816 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-sg-core-conf-yaml\") pod \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.263893 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-run-httpd\") pod \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.263942 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-log-httpd\") pod \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.264056 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-config-data\") pod \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.264111 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-combined-ca-bundle\") pod \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.264136 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-scripts\") pod \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.264164 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgdll\" (UniqueName: \"kubernetes.io/projected/36183a17-928d-43a5-b1ac-7b8ebb8335c3-kube-api-access-sgdll\") pod \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\" (UID: \"36183a17-928d-43a5-b1ac-7b8ebb8335c3\") " Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.264446 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-run-httpd" 
(OuterVolumeSpecName: "run-httpd") pod "36183a17-928d-43a5-b1ac-7b8ebb8335c3" (UID: "36183a17-928d-43a5-b1ac-7b8ebb8335c3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.264580 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "36183a17-928d-43a5-b1ac-7b8ebb8335c3" (UID: "36183a17-928d-43a5-b1ac-7b8ebb8335c3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.264932 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz2zz\" (UniqueName: \"kubernetes.io/projected/b634d10f-beff-4ef8-8602-8e4acb8a5a4b-kube-api-access-qz2zz\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.264950 4852 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.264961 4852 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36183a17-928d-43a5-b1ac-7b8ebb8335c3-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.268676 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36183a17-928d-43a5-b1ac-7b8ebb8335c3-kube-api-access-sgdll" (OuterVolumeSpecName: "kube-api-access-sgdll") pod "36183a17-928d-43a5-b1ac-7b8ebb8335c3" (UID: "36183a17-928d-43a5-b1ac-7b8ebb8335c3"). InnerVolumeSpecName "kube-api-access-sgdll". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.269091 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-scripts" (OuterVolumeSpecName: "scripts") pod "36183a17-928d-43a5-b1ac-7b8ebb8335c3" (UID: "36183a17-928d-43a5-b1ac-7b8ebb8335c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.291577 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "36183a17-928d-43a5-b1ac-7b8ebb8335c3" (UID: "36183a17-928d-43a5-b1ac-7b8ebb8335c3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.321847 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b634d10f-beff-4ef8-8602-8e4acb8a5a4b","Type":"ContainerDied","Data":"b7bea182960a30bd09c1ea224e27201a75bf99abaeaef395419c2f63f3bffe7d"} Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.321886 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.321905 4852 scope.go:117] "RemoveContainer" containerID="1a3020dd02aaa6a46b248b982f5497dd2bb5178a04e3e26688b42ba73051ade6" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.325618 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"49382464-d20a-4ec7-9096-5679b0fc12b7","Type":"ContainerStarted","Data":"803e4b66d3630be7491db23b6b9e9d91d9213e9d663d1015e26cd200c41957a3"} Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.341326 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"f8b1918be0b9d57d53db2b9e0a1f8d939b6b8e07171e1e5c9e19c272dc47420a"} Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.346527 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36183a17-928d-43a5-b1ac-7b8ebb8335c3","Type":"ContainerDied","Data":"076a34cc9b47f587b71aae53dd10933e40570cd25b0166c6f0cb20e675a97eec"} Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.346652 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.359125 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.7448933549999999 podStartE2EDuration="12.359097757s" podCreationTimestamp="2025-12-01 20:23:41 +0000 UTC" firstStartedPulling="2025-12-01 20:23:42.150851927 +0000 UTC m=+1142.077933354" lastFinishedPulling="2025-12-01 20:23:52.765056339 +0000 UTC m=+1152.692137756" observedRunningTime="2025-12-01 20:23:53.353934295 +0000 UTC m=+1153.281015712" watchObservedRunningTime="2025-12-01 20:23:53.359097757 +0000 UTC m=+1153.286179174" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.359189 4852 scope.go:117] "RemoveContainer" containerID="78bddec1e1a48a38ef379e8aab54ef42a518d40dfc77962e0c00c5cbc9b8d065" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.367406 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.367500 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgdll\" (UniqueName: \"kubernetes.io/projected/36183a17-928d-43a5-b1ac-7b8ebb8335c3-kube-api-access-sgdll\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.367517 4852 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.387838 4852 scope.go:117] "RemoveContainer" containerID="65315aa2b20e6cc2604c6107843fae180850ec8e03b92b7a6860b9bfbc3ae95f" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.394349 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.404561 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") 
pod "36183a17-928d-43a5-b1ac-7b8ebb8335c3" (UID: "36183a17-928d-43a5-b1ac-7b8ebb8335c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.406046 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-config-data" (OuterVolumeSpecName: "config-data") pod "36183a17-928d-43a5-b1ac-7b8ebb8335c3" (UID: "36183a17-928d-43a5-b1ac-7b8ebb8335c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.411901 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.422343 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 20:23:53 crc kubenswrapper[4852]: E1201 20:23:53.422833 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="sg-core" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.422853 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="sg-core" Dec 01 20:23:53 crc kubenswrapper[4852]: E1201 20:23:53.422867 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="proxy-httpd" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.422876 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="proxy-httpd" Dec 01 20:23:53 crc kubenswrapper[4852]: E1201 20:23:53.422888 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="ceilometer-notification-agent" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.422895 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="ceilometer-notification-agent" Dec 01 20:23:53 crc kubenswrapper[4852]: E1201 20:23:53.422922 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="ceilometer-central-agent" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.422930 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="ceilometer-central-agent" Dec 01 20:23:53 crc kubenswrapper[4852]: E1201 20:23:53.422944 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b634d10f-beff-4ef8-8602-8e4acb8a5a4b" containerName="kube-state-metrics" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.422950 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b634d10f-beff-4ef8-8602-8e4acb8a5a4b" containerName="kube-state-metrics" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.423125 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="sg-core" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.423136 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="ceilometer-notification-agent" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.423147 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="proxy-httpd" Dec 01 
20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.423161 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" containerName="ceilometer-central-agent" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.423182 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="b634d10f-beff-4ef8-8602-8e4acb8a5a4b" containerName="kube-state-metrics" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.423223 4852 scope.go:117] "RemoveContainer" containerID="282bc1d3cc6fa5e631b80a46763e5023be4811e55bb98cca75f29b0f33699bcd" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.423965 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.426742 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.432133 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.455874 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.456294 4852 scope.go:117] "RemoveContainer" containerID="c4728a81a164da5503ec8edc3262f304958949f4fbc4a09f0a7deee8a3173ab1" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.469312 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dqnw\" (UniqueName: \"kubernetes.io/projected/2896f52e-0a75-4d18-b72b-66b173aaa3b2-kube-api-access-5dqnw\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.469411 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2896f52e-0a75-4d18-b72b-66b173aaa3b2-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.469466 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/2896f52e-0a75-4d18-b72b-66b173aaa3b2-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.469497 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/2896f52e-0a75-4d18-b72b-66b173aaa3b2-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.469593 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.469607 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/36183a17-928d-43a5-b1ac-7b8ebb8335c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.571008 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dqnw\" (UniqueName: \"kubernetes.io/projected/2896f52e-0a75-4d18-b72b-66b173aaa3b2-kube-api-access-5dqnw\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.571556 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2896f52e-0a75-4d18-b72b-66b173aaa3b2-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.571627 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/2896f52e-0a75-4d18-b72b-66b173aaa3b2-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.571680 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/2896f52e-0a75-4d18-b72b-66b173aaa3b2-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.585530 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/2896f52e-0a75-4d18-b72b-66b173aaa3b2-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.585531 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/2896f52e-0a75-4d18-b72b-66b173aaa3b2-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.585621 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2896f52e-0a75-4d18-b72b-66b173aaa3b2-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.591528 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dqnw\" (UniqueName: \"kubernetes.io/projected/2896f52e-0a75-4d18-b72b-66b173aaa3b2-kube-api-access-5dqnw\") pod \"kube-state-metrics-0\" (UID: \"2896f52e-0a75-4d18-b72b-66b173aaa3b2\") " pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.713375 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.723264 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 
20:23:53.742691 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.745908 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.748999 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.750097 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.750784 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.756004 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.756288 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.779333 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-log-httpd\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.779494 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgrz5\" (UniqueName: \"kubernetes.io/projected/87bac28d-2fef-47d3-a061-f32e360398e4-kube-api-access-jgrz5\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.779556 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.779586 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.779620 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-scripts\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.779668 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.779699 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-config-data\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.779732 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-run-httpd\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.882224 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-scripts\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.882541 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.882562 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-config-data\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.882584 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-run-httpd\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.882677 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-log-httpd\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.882723 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgrz5\" (UniqueName: \"kubernetes.io/projected/87bac28d-2fef-47d3-a061-f32e360398e4-kube-api-access-jgrz5\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.882760 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.882777 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.883825 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-run-httpd\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.884612 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-log-httpd\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.891554 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.891923 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.892873 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.893764 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-scripts\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.893907 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-config-data\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:53 crc kubenswrapper[4852]: I1201 20:23:53.909542 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgrz5\" (UniqueName: \"kubernetes.io/projected/87bac28d-2fef-47d3-a061-f32e360398e4-kube-api-access-jgrz5\") pod \"ceilometer-0\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") " pod="openstack/ceilometer-0" Dec 01 20:23:54 crc kubenswrapper[4852]: I1201 20:23:54.066527 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:23:54 crc kubenswrapper[4852]: I1201 20:23:54.242803 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-d7844c8bb-bfdj8" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 01 20:23:54 crc kubenswrapper[4852]: I1201 20:23:54.272250 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 20:23:54 crc kubenswrapper[4852]: W1201 20:23:54.276348 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2896f52e_0a75_4d18_b72b_66b173aaa3b2.slice/crio-53770f1daf0a5a1bf33d5df3ddb9624813ea2ed00610794c3f77c02be01153ce WatchSource:0}: Error finding container 53770f1daf0a5a1bf33d5df3ddb9624813ea2ed00610794c3f77c02be01153ce: Status 404 returned error can't find the container with id 53770f1daf0a5a1bf33d5df3ddb9624813ea2ed00610794c3f77c02be01153ce Dec 01 20:23:54 crc kubenswrapper[4852]: I1201 20:23:54.345962 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36183a17-928d-43a5-b1ac-7b8ebb8335c3" path="/var/lib/kubelet/pods/36183a17-928d-43a5-b1ac-7b8ebb8335c3/volumes" Dec 01 20:23:54 crc kubenswrapper[4852]: I1201 20:23:54.346938 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b634d10f-beff-4ef8-8602-8e4acb8a5a4b" path="/var/lib/kubelet/pods/b634d10f-beff-4ef8-8602-8e4acb8a5a4b/volumes" Dec 01 20:23:54 crc kubenswrapper[4852]: I1201 20:23:54.359972 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2896f52e-0a75-4d18-b72b-66b173aaa3b2","Type":"ContainerStarted","Data":"53770f1daf0a5a1bf33d5df3ddb9624813ea2ed00610794c3f77c02be01153ce"} Dec 01 20:23:54 crc kubenswrapper[4852]: I1201 20:23:54.548996 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:54 crc kubenswrapper[4852]: I1201 20:23:54.631777 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:23:54 crc kubenswrapper[4852]: W1201 20:23:54.632033 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87bac28d_2fef_47d3_a061_f32e360398e4.slice/crio-654706ff3891854b5a66dd8d4d4290e27da175ba0ce413fa30476097808764d7 WatchSource:0}: Error finding container 654706ff3891854b5a66dd8d4d4290e27da175ba0ce413fa30476097808764d7: Status 404 returned error can't find the container with id 654706ff3891854b5a66dd8d4d4290e27da175ba0ce413fa30476097808764d7 Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.120155 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.120702 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-cd9d56787-qlkbk" Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.213918 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.214263 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="3d67c2f4-a127-4a7f-bb01-15543416188f" 
containerName="glance-log" containerID="cri-o://d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7" gracePeriod=30 Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.214412 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="3d67c2f4-a127-4a7f-bb01-15543416188f" containerName="glance-httpd" containerID="cri-o://d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b" gracePeriod=30 Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.398378 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2896f52e-0a75-4d18-b72b-66b173aaa3b2","Type":"ContainerStarted","Data":"90193e92d3b5bccace1e137be6a1f9b50ba9bff468f267f8524de99b6ea5e42c"} Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.400322 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.405324 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"87bac28d-2fef-47d3-a061-f32e360398e4","Type":"ContainerStarted","Data":"654706ff3891854b5a66dd8d4d4290e27da175ba0ce413fa30476097808764d7"} Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.406989 4852 generic.go:334] "Generic (PLEG): container finished" podID="3d67c2f4-a127-4a7f-bb01-15543416188f" containerID="d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7" exitCode=143 Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.407019 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3d67c2f4-a127-4a7f-bb01-15543416188f","Type":"ContainerDied","Data":"d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7"} Dec 01 20:23:55 crc kubenswrapper[4852]: I1201 20:23:55.453382 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.058861636 podStartE2EDuration="2.45335456s" podCreationTimestamp="2025-12-01 20:23:53 +0000 UTC" firstStartedPulling="2025-12-01 20:23:54.316199972 +0000 UTC m=+1154.243281389" lastFinishedPulling="2025-12-01 20:23:54.710692896 +0000 UTC m=+1154.637774313" observedRunningTime="2025-12-01 20:23:55.446965009 +0000 UTC m=+1155.374046416" watchObservedRunningTime="2025-12-01 20:23:55.45335456 +0000 UTC m=+1155.380435977" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.222549 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.223247 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" containerName="glance-log" containerID="cri-o://af740d1b552f0b3e423e9e9856e527cdaa85ef20f2cd30f0c7f236f3fd0c1961" gracePeriod=30 Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.223426 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" containerName="glance-httpd" containerID="cri-o://2f92d9f708bb46fd47a913597b9c5d38fe1e6a397e9b3299a15733b35526b617" gracePeriod=30 Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.419761 4852 generic.go:334] "Generic (PLEG): container finished" podID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" 
containerID="af740d1b552f0b3e423e9e9856e527cdaa85ef20f2cd30f0c7f236f3fd0c1961" exitCode=143 Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.419882 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d","Type":"ContainerDied","Data":"af740d1b552f0b3e423e9e9856e527cdaa85ef20f2cd30f0c7f236f3fd0c1961"} Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.422493 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"87bac28d-2fef-47d3-a061-f32e360398e4","Type":"ContainerStarted","Data":"e7338470449dc36621df47c27e17cbadf2f20104671bc57407a05f5a593a807c"} Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.667130 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-qq8bt"] Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.668469 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qq8bt" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.680912 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-qq8bt"] Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.750373 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70ae0323-a451-41a9-b76e-899430114d6c-operator-scripts\") pod \"nova-api-db-create-qq8bt\" (UID: \"70ae0323-a451-41a9-b76e-899430114d6c\") " pod="openstack/nova-api-db-create-qq8bt" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.750753 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zhnf\" (UniqueName: \"kubernetes.io/projected/70ae0323-a451-41a9-b76e-899430114d6c-kube-api-access-4zhnf\") pod \"nova-api-db-create-qq8bt\" (UID: \"70ae0323-a451-41a9-b76e-899430114d6c\") " pod="openstack/nova-api-db-create-qq8bt" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.766486 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-gwfcc"] Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.768019 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-gwfcc" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.791219 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-f3b0-account-create-update-vzdnp"] Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.793163 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-f3b0-account-create-update-vzdnp" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.795914 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.799983 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-gwfcc"] Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.812432 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-f3b0-account-create-update-vzdnp"] Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.854777 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-operator-scripts\") pod \"nova-cell0-db-create-gwfcc\" (UID: \"477f2357-5a45-4725-b0a1-77f6b5a8eeb3\") " pod="openstack/nova-cell0-db-create-gwfcc" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.854888 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm7bs\" (UniqueName: \"kubernetes.io/projected/31688a6c-099f-466e-b08b-99b67777aadd-kube-api-access-zm7bs\") pod \"nova-api-f3b0-account-create-update-vzdnp\" (UID: \"31688a6c-099f-466e-b08b-99b67777aadd\") " pod="openstack/nova-api-f3b0-account-create-update-vzdnp" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.854933 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70ae0323-a451-41a9-b76e-899430114d6c-operator-scripts\") pod \"nova-api-db-create-qq8bt\" (UID: \"70ae0323-a451-41a9-b76e-899430114d6c\") " pod="openstack/nova-api-db-create-qq8bt" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.855044 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31688a6c-099f-466e-b08b-99b67777aadd-operator-scripts\") pod \"nova-api-f3b0-account-create-update-vzdnp\" (UID: \"31688a6c-099f-466e-b08b-99b67777aadd\") " pod="openstack/nova-api-f3b0-account-create-update-vzdnp" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.855085 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c2cr\" (UniqueName: \"kubernetes.io/projected/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-kube-api-access-9c2cr\") pod \"nova-cell0-db-create-gwfcc\" (UID: \"477f2357-5a45-4725-b0a1-77f6b5a8eeb3\") " pod="openstack/nova-cell0-db-create-gwfcc" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.855150 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zhnf\" (UniqueName: \"kubernetes.io/projected/70ae0323-a451-41a9-b76e-899430114d6c-kube-api-access-4zhnf\") pod \"nova-api-db-create-qq8bt\" (UID: \"70ae0323-a451-41a9-b76e-899430114d6c\") " pod="openstack/nova-api-db-create-qq8bt" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.855888 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70ae0323-a451-41a9-b76e-899430114d6c-operator-scripts\") pod \"nova-api-db-create-qq8bt\" (UID: \"70ae0323-a451-41a9-b76e-899430114d6c\") " pod="openstack/nova-api-db-create-qq8bt" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.865863 4852 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/nova-cell1-db-create-g4rk6"] Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.867292 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-g4rk6" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.880559 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zhnf\" (UniqueName: \"kubernetes.io/projected/70ae0323-a451-41a9-b76e-899430114d6c-kube-api-access-4zhnf\") pod \"nova-api-db-create-qq8bt\" (UID: \"70ae0323-a451-41a9-b76e-899430114d6c\") " pod="openstack/nova-api-db-create-qq8bt" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.933795 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-g4rk6"] Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.960824 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c2cr\" (UniqueName: \"kubernetes.io/projected/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-kube-api-access-9c2cr\") pod \"nova-cell0-db-create-gwfcc\" (UID: \"477f2357-5a45-4725-b0a1-77f6b5a8eeb3\") " pod="openstack/nova-cell0-db-create-gwfcc" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.960990 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-operator-scripts\") pod \"nova-cell0-db-create-gwfcc\" (UID: \"477f2357-5a45-4725-b0a1-77f6b5a8eeb3\") " pod="openstack/nova-cell0-db-create-gwfcc" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.961032 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2757bd33-854b-4876-965d-d77359752edb-operator-scripts\") pod \"nova-cell1-db-create-g4rk6\" (UID: \"2757bd33-854b-4876-965d-d77359752edb\") " pod="openstack/nova-cell1-db-create-g4rk6" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.961107 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjdr9\" (UniqueName: \"kubernetes.io/projected/2757bd33-854b-4876-965d-d77359752edb-kube-api-access-pjdr9\") pod \"nova-cell1-db-create-g4rk6\" (UID: \"2757bd33-854b-4876-965d-d77359752edb\") " pod="openstack/nova-cell1-db-create-g4rk6" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.961154 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm7bs\" (UniqueName: \"kubernetes.io/projected/31688a6c-099f-466e-b08b-99b67777aadd-kube-api-access-zm7bs\") pod \"nova-api-f3b0-account-create-update-vzdnp\" (UID: \"31688a6c-099f-466e-b08b-99b67777aadd\") " pod="openstack/nova-api-f3b0-account-create-update-vzdnp" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.961230 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31688a6c-099f-466e-b08b-99b67777aadd-operator-scripts\") pod \"nova-api-f3b0-account-create-update-vzdnp\" (UID: \"31688a6c-099f-466e-b08b-99b67777aadd\") " pod="openstack/nova-api-f3b0-account-create-update-vzdnp" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.964260 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-operator-scripts\") pod \"nova-cell0-db-create-gwfcc\" (UID: 
\"477f2357-5a45-4725-b0a1-77f6b5a8eeb3\") " pod="openstack/nova-cell0-db-create-gwfcc" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.968748 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31688a6c-099f-466e-b08b-99b67777aadd-operator-scripts\") pod \"nova-api-f3b0-account-create-update-vzdnp\" (UID: \"31688a6c-099f-466e-b08b-99b67777aadd\") " pod="openstack/nova-api-f3b0-account-create-update-vzdnp" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.987232 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm7bs\" (UniqueName: \"kubernetes.io/projected/31688a6c-099f-466e-b08b-99b67777aadd-kube-api-access-zm7bs\") pod \"nova-api-f3b0-account-create-update-vzdnp\" (UID: \"31688a6c-099f-466e-b08b-99b67777aadd\") " pod="openstack/nova-api-f3b0-account-create-update-vzdnp" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.988787 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c2cr\" (UniqueName: \"kubernetes.io/projected/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-kube-api-access-9c2cr\") pod \"nova-cell0-db-create-gwfcc\" (UID: \"477f2357-5a45-4725-b0a1-77f6b5a8eeb3\") " pod="openstack/nova-cell0-db-create-gwfcc" Dec 01 20:23:56 crc kubenswrapper[4852]: I1201 20:23:56.991873 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qq8bt" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.024835 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-ace9-account-create-update-t4g7j"] Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.029838 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.032178 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.039807 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ace9-account-create-update-t4g7j"] Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.063867 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2757bd33-854b-4876-965d-d77359752edb-operator-scripts\") pod \"nova-cell1-db-create-g4rk6\" (UID: \"2757bd33-854b-4876-965d-d77359752edb\") " pod="openstack/nova-cell1-db-create-g4rk6" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.064012 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjdr9\" (UniqueName: \"kubernetes.io/projected/2757bd33-854b-4876-965d-d77359752edb-kube-api-access-pjdr9\") pod \"nova-cell1-db-create-g4rk6\" (UID: \"2757bd33-854b-4876-965d-d77359752edb\") " pod="openstack/nova-cell1-db-create-g4rk6" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.067098 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2757bd33-854b-4876-965d-d77359752edb-operator-scripts\") pod \"nova-cell1-db-create-g4rk6\" (UID: \"2757bd33-854b-4876-965d-d77359752edb\") " pod="openstack/nova-cell1-db-create-g4rk6" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.083155 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjdr9\" (UniqueName: \"kubernetes.io/projected/2757bd33-854b-4876-965d-d77359752edb-kube-api-access-pjdr9\") pod \"nova-cell1-db-create-g4rk6\" (UID: \"2757bd33-854b-4876-965d-d77359752edb\") " pod="openstack/nova-cell1-db-create-g4rk6" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.092117 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-gwfcc" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.108605 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-f3b0-account-create-update-vzdnp" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.175799 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx78l\" (UniqueName: \"kubernetes.io/projected/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-kube-api-access-dx78l\") pod \"nova-cell0-ace9-account-create-update-t4g7j\" (UID: \"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb\") " pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.176320 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-operator-scripts\") pod \"nova-cell0-ace9-account-create-update-t4g7j\" (UID: \"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb\") " pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.190119 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-63da-account-create-update-2dq9v"] Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.191695 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-63da-account-create-update-2dq9v" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.197084 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.224256 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-63da-account-create-update-2dq9v"] Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.241582 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-g4rk6" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.278878 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0c09ee4-565f-4b6e-b17f-78defa53bde3-operator-scripts\") pod \"nova-cell1-63da-account-create-update-2dq9v\" (UID: \"b0c09ee4-565f-4b6e-b17f-78defa53bde3\") " pod="openstack/nova-cell1-63da-account-create-update-2dq9v" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.278970 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx78l\" (UniqueName: \"kubernetes.io/projected/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-kube-api-access-dx78l\") pod \"nova-cell0-ace9-account-create-update-t4g7j\" (UID: \"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb\") " pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.279075 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-operator-scripts\") pod \"nova-cell0-ace9-account-create-update-t4g7j\" (UID: \"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb\") " pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.279121 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6hv4\" (UniqueName: \"kubernetes.io/projected/b0c09ee4-565f-4b6e-b17f-78defa53bde3-kube-api-access-r6hv4\") pod \"nova-cell1-63da-account-create-update-2dq9v\" (UID: \"b0c09ee4-565f-4b6e-b17f-78defa53bde3\") " pod="openstack/nova-cell1-63da-account-create-update-2dq9v" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.299268 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-operator-scripts\") pod \"nova-cell0-ace9-account-create-update-t4g7j\" (UID: \"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb\") " pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.308910 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx78l\" (UniqueName: \"kubernetes.io/projected/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-kube-api-access-dx78l\") pod \"nova-cell0-ace9-account-create-update-t4g7j\" (UID: \"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb\") " pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.382269 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0c09ee4-565f-4b6e-b17f-78defa53bde3-operator-scripts\") pod \"nova-cell1-63da-account-create-update-2dq9v\" (UID: 
\"b0c09ee4-565f-4b6e-b17f-78defa53bde3\") " pod="openstack/nova-cell1-63da-account-create-update-2dq9v" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.382626 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6hv4\" (UniqueName: \"kubernetes.io/projected/b0c09ee4-565f-4b6e-b17f-78defa53bde3-kube-api-access-r6hv4\") pod \"nova-cell1-63da-account-create-update-2dq9v\" (UID: \"b0c09ee4-565f-4b6e-b17f-78defa53bde3\") " pod="openstack/nova-cell1-63da-account-create-update-2dq9v" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.384528 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0c09ee4-565f-4b6e-b17f-78defa53bde3-operator-scripts\") pod \"nova-cell1-63da-account-create-update-2dq9v\" (UID: \"b0c09ee4-565f-4b6e-b17f-78defa53bde3\") " pod="openstack/nova-cell1-63da-account-create-update-2dq9v" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.407576 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6hv4\" (UniqueName: \"kubernetes.io/projected/b0c09ee4-565f-4b6e-b17f-78defa53bde3-kube-api-access-r6hv4\") pod \"nova-cell1-63da-account-create-update-2dq9v\" (UID: \"b0c09ee4-565f-4b6e-b17f-78defa53bde3\") " pod="openstack/nova-cell1-63da-account-create-update-2dq9v" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.472035 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"87bac28d-2fef-47d3-a061-f32e360398e4","Type":"ContainerStarted","Data":"91f40c533416a50425ec8894f5e9a75a21b44a7001ae83206731b1cb30675898"} Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.533048 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.566229 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-63da-account-create-update-2dq9v" Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.606110 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-qq8bt"] Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.780628 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-f3b0-account-create-update-vzdnp"] Dec 01 20:23:57 crc kubenswrapper[4852]: I1201 20:23:57.872986 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-gwfcc"] Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.008224 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-g4rk6"] Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.226439 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ace9-account-create-update-t4g7j"] Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.338062 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-63da-account-create-update-2dq9v"] Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.491637 4852 generic.go:334] "Generic (PLEG): container finished" podID="70ae0323-a451-41a9-b76e-899430114d6c" containerID="7984de1dc8e3b2c722c5914f54901c9685cc19c67b860a61e974b590df0e2a58" exitCode=0 Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.491741 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-qq8bt" event={"ID":"70ae0323-a451-41a9-b76e-899430114d6c","Type":"ContainerDied","Data":"7984de1dc8e3b2c722c5914f54901c9685cc19c67b860a61e974b590df0e2a58"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.491842 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-qq8bt" event={"ID":"70ae0323-a451-41a9-b76e-899430114d6c","Type":"ContainerStarted","Data":"7a29b4c2bb09acaf2eb0dba0244eb95a1a6f0935e1373f5bfc7d928ed268312d"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.496031 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-63da-account-create-update-2dq9v" event={"ID":"b0c09ee4-565f-4b6e-b17f-78defa53bde3","Type":"ContainerStarted","Data":"79bf6bede45fcfb56c98712262b92722f2183ee76e89fe90ef14500396fe4d2c"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.498805 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-g4rk6" event={"ID":"2757bd33-854b-4876-965d-d77359752edb","Type":"ContainerStarted","Data":"f32329b1a51582daf5653aecdc1410c54fe2266f973ae1eaabc6270a34ac7a26"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.498837 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-g4rk6" event={"ID":"2757bd33-854b-4876-965d-d77359752edb","Type":"ContainerStarted","Data":"9b514d73dbd1005d0230fe7c63283eb57ba383b259c6d7f0d87b1fe3005a358b"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.507329 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"87bac28d-2fef-47d3-a061-f32e360398e4","Type":"ContainerStarted","Data":"7b71c6200365dfdaa3b5c14cb04fc7b5200024ea32e00826ee39ec07a7cfe580"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.510313 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" 
event={"ID":"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb","Type":"ContainerStarted","Data":"860fda961fb837a11c587fc705ab12b1d492c5974fd6c6fa63a4f920fddb65f8"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.510359 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" event={"ID":"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb","Type":"ContainerStarted","Data":"762be0133183e7b74ab3ed826366dc99bc4e6c55ed3fee2f1206a810a193f786"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.521309 4852 generic.go:334] "Generic (PLEG): container finished" podID="31688a6c-099f-466e-b08b-99b67777aadd" containerID="f7922d0010a7699f282c7f2614bda314b354850b4b6898daa6fdaec5df96fa8e" exitCode=0 Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.521433 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-f3b0-account-create-update-vzdnp" event={"ID":"31688a6c-099f-466e-b08b-99b67777aadd","Type":"ContainerDied","Data":"f7922d0010a7699f282c7f2614bda314b354850b4b6898daa6fdaec5df96fa8e"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.521492 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-f3b0-account-create-update-vzdnp" event={"ID":"31688a6c-099f-466e-b08b-99b67777aadd","Type":"ContainerStarted","Data":"28ba31018339207144ca60330883fd26786f6ba348a04c1df53c21d5c2e28a25"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.545981 4852 generic.go:334] "Generic (PLEG): container finished" podID="477f2357-5a45-4725-b0a1-77f6b5a8eeb3" containerID="5911fa4781f08ccc500f08d7b6dfcc60fb2eccea30a7fcfec5c3e2e4f047e88f" exitCode=0 Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.546042 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-gwfcc" event={"ID":"477f2357-5a45-4725-b0a1-77f6b5a8eeb3","Type":"ContainerDied","Data":"5911fa4781f08ccc500f08d7b6dfcc60fb2eccea30a7fcfec5c3e2e4f047e88f"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.546073 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-gwfcc" event={"ID":"477f2357-5a45-4725-b0a1-77f6b5a8eeb3","Type":"ContainerStarted","Data":"758a9aa9ff81f31356d76f5286eb3874d65022ddc608fedf7a87c4766d9a2c3d"} Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.556002 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-g4rk6" podStartSLOduration=2.555977988 podStartE2EDuration="2.555977988s" podCreationTimestamp="2025-12-01 20:23:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:58.548171853 +0000 UTC m=+1158.475253280" watchObservedRunningTime="2025-12-01 20:23:58.555977988 +0000 UTC m=+1158.483059405" Dec 01 20:23:58 crc kubenswrapper[4852]: I1201 20:23:58.609577 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" podStartSLOduration=2.60954625 podStartE2EDuration="2.60954625s" podCreationTimestamp="2025-12-01 20:23:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:23:58.599321528 +0000 UTC m=+1158.526402945" watchObservedRunningTime="2025-12-01 20:23:58.60954625 +0000 UTC m=+1158.536627667" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.131027 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: E1201 20:23:59.152522 4852 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0c09ee4_565f_4b6e_b17f_78defa53bde3.slice/crio-31440777c6aa2fbc0670d01c36e122863c9678b06b498746b4e38b8a26fef230.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0c09ee4_565f_4b6e_b17f_78defa53bde3.slice/crio-conmon-31440777c6aa2fbc0670d01c36e122863c9678b06b498746b4e38b8a26fef230.scope\": RecentStats: unable to find data in memory cache]" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.235479 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-logs\") pod \"3d67c2f4-a127-4a7f-bb01-15543416188f\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.235589 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-combined-ca-bundle\") pod \"3d67c2f4-a127-4a7f-bb01-15543416188f\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.235659 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-httpd-run\") pod \"3d67c2f4-a127-4a7f-bb01-15543416188f\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.235755 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-public-tls-certs\") pod \"3d67c2f4-a127-4a7f-bb01-15543416188f\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.235816 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-scripts\") pod \"3d67c2f4-a127-4a7f-bb01-15543416188f\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.235907 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7trmh\" (UniqueName: \"kubernetes.io/projected/3d67c2f4-a127-4a7f-bb01-15543416188f-kube-api-access-7trmh\") pod \"3d67c2f4-a127-4a7f-bb01-15543416188f\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.235948 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-config-data\") pod \"3d67c2f4-a127-4a7f-bb01-15543416188f\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.235979 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"3d67c2f4-a127-4a7f-bb01-15543416188f\" (UID: \"3d67c2f4-a127-4a7f-bb01-15543416188f\") " Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 
20:23:59.236743 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-logs" (OuterVolumeSpecName: "logs") pod "3d67c2f4-a127-4a7f-bb01-15543416188f" (UID: "3d67c2f4-a127-4a7f-bb01-15543416188f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.236850 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "3d67c2f4-a127-4a7f-bb01-15543416188f" (UID: "3d67c2f4-a127-4a7f-bb01-15543416188f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.246605 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-scripts" (OuterVolumeSpecName: "scripts") pod "3d67c2f4-a127-4a7f-bb01-15543416188f" (UID: "3d67c2f4-a127-4a7f-bb01-15543416188f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.247521 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "3d67c2f4-a127-4a7f-bb01-15543416188f" (UID: "3d67c2f4-a127-4a7f-bb01-15543416188f"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.264953 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d67c2f4-a127-4a7f-bb01-15543416188f-kube-api-access-7trmh" (OuterVolumeSpecName: "kube-api-access-7trmh") pod "3d67c2f4-a127-4a7f-bb01-15543416188f" (UID: "3d67c2f4-a127-4a7f-bb01-15543416188f"). InnerVolumeSpecName "kube-api-access-7trmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.291174 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d67c2f4-a127-4a7f-bb01-15543416188f" (UID: "3d67c2f4-a127-4a7f-bb01-15543416188f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.330346 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-config-data" (OuterVolumeSpecName: "config-data") pod "3d67c2f4-a127-4a7f-bb01-15543416188f" (UID: "3d67c2f4-a127-4a7f-bb01-15543416188f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.331348 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3d67c2f4-a127-4a7f-bb01-15543416188f" (UID: "3d67c2f4-a127-4a7f-bb01-15543416188f"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.339061 4852 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.339116 4852 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.339128 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.339140 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7trmh\" (UniqueName: \"kubernetes.io/projected/3d67c2f4-a127-4a7f-bb01-15543416188f-kube-api-access-7trmh\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.339151 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.339198 4852 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.339208 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d67c2f4-a127-4a7f-bb01-15543416188f-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.339216 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d67c2f4-a127-4a7f-bb01-15543416188f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.371142 4852 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.446049 4852 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.566580 4852 generic.go:334] "Generic (PLEG): container finished" podID="6546f5f4-5cb1-4cb4-a403-1fc050cd9efb" containerID="860fda961fb837a11c587fc705ab12b1d492c5974fd6c6fa63a4f920fddb65f8" exitCode=0 Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.566670 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" event={"ID":"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb","Type":"ContainerDied","Data":"860fda961fb837a11c587fc705ab12b1d492c5974fd6c6fa63a4f920fddb65f8"} Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.573665 4852 generic.go:334] "Generic (PLEG): container finished" podID="3d67c2f4-a127-4a7f-bb01-15543416188f" containerID="d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b" exitCode=0 Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.573769 4852 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3d67c2f4-a127-4a7f-bb01-15543416188f","Type":"ContainerDied","Data":"d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b"} Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.573777 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.573835 4852 scope.go:117] "RemoveContainer" containerID="d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.573815 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3d67c2f4-a127-4a7f-bb01-15543416188f","Type":"ContainerDied","Data":"53f1ad250140f7a408ffbae9efff4621c0fb798e1b424e263b82766eb722e4b6"} Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.595713 4852 generic.go:334] "Generic (PLEG): container finished" podID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" containerID="2f92d9f708bb46fd47a913597b9c5d38fe1e6a397e9b3299a15733b35526b617" exitCode=0 Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.595789 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d","Type":"ContainerDied","Data":"2f92d9f708bb46fd47a913597b9c5d38fe1e6a397e9b3299a15733b35526b617"} Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.597776 4852 generic.go:334] "Generic (PLEG): container finished" podID="b0c09ee4-565f-4b6e-b17f-78defa53bde3" containerID="31440777c6aa2fbc0670d01c36e122863c9678b06b498746b4e38b8a26fef230" exitCode=0 Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.597838 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-63da-account-create-update-2dq9v" event={"ID":"b0c09ee4-565f-4b6e-b17f-78defa53bde3","Type":"ContainerDied","Data":"31440777c6aa2fbc0670d01c36e122863c9678b06b498746b4e38b8a26fef230"} Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.600203 4852 generic.go:334] "Generic (PLEG): container finished" podID="2757bd33-854b-4876-965d-d77359752edb" containerID="f32329b1a51582daf5653aecdc1410c54fe2266f973ae1eaabc6270a34ac7a26" exitCode=0 Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.600581 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-g4rk6" event={"ID":"2757bd33-854b-4876-965d-d77359752edb","Type":"ContainerDied","Data":"f32329b1a51582daf5653aecdc1410c54fe2266f973ae1eaabc6270a34ac7a26"} Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.700628 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.720316 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.746957 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:23:59 crc kubenswrapper[4852]: E1201 20:23:59.747557 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d67c2f4-a127-4a7f-bb01-15543416188f" containerName="glance-log" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.747590 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d67c2f4-a127-4a7f-bb01-15543416188f" containerName="glance-log" Dec 01 20:23:59 crc kubenswrapper[4852]: E1201 
20:23:59.747600 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d67c2f4-a127-4a7f-bb01-15543416188f" containerName="glance-httpd" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.747606 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d67c2f4-a127-4a7f-bb01-15543416188f" containerName="glance-httpd" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.747823 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d67c2f4-a127-4a7f-bb01-15543416188f" containerName="glance-httpd" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.747841 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d67c2f4-a127-4a7f-bb01-15543416188f" containerName="glance-log" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.749097 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.752623 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.753097 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.770063 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.781830 4852 scope.go:117] "RemoveContainer" containerID="d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.854408 4852 scope.go:117] "RemoveContainer" containerID="d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.855806 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02fed1df-7a8d-41ed-8662-17ecda728c06-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.855855 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-config-data\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.855906 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02fed1df-7a8d-41ed-8662-17ecda728c06-logs\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.855949 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.855975 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-5rn8m\" (UniqueName: \"kubernetes.io/projected/02fed1df-7a8d-41ed-8662-17ecda728c06-kube-api-access-5rn8m\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.856006 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.856054 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.856091 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-scripts\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: E1201 20:23:59.857967 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b\": container with ID starting with d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b not found: ID does not exist" containerID="d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.858030 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b"} err="failed to get container status \"d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b\": rpc error: code = NotFound desc = could not find container \"d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b\": container with ID starting with d83dc4308c117985321f98d687e57d5da4d5fc79e88191b166eb8db271112a7b not found: ID does not exist" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.858065 4852 scope.go:117] "RemoveContainer" containerID="d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7" Dec 01 20:23:59 crc kubenswrapper[4852]: E1201 20:23:59.858425 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7\": container with ID starting with d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7 not found: ID does not exist" containerID="d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.858492 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7"} err="failed to get container status \"d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7\": rpc error: code = NotFound desc = could 
not find container \"d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7\": container with ID starting with d847710954370bfcdb6b877560f5aafec37faac31fe7109ac59bc8b2f8e25ad7 not found: ID does not exist" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.958252 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02fed1df-7a8d-41ed-8662-17ecda728c06-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.958313 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-config-data\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.958373 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02fed1df-7a8d-41ed-8662-17ecda728c06-logs\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.958425 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.958480 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rn8m\" (UniqueName: \"kubernetes.io/projected/02fed1df-7a8d-41ed-8662-17ecda728c06-kube-api-access-5rn8m\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.958521 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.958562 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.958604 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-scripts\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.958893 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02fed1df-7a8d-41ed-8662-17ecda728c06-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.959163 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.959853 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02fed1df-7a8d-41ed-8662-17ecda728c06-logs\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.971396 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-config-data\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.972543 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.973132 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:23:59 crc kubenswrapper[4852]: I1201 20:23:59.980800 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rn8m\" (UniqueName: \"kubernetes.io/projected/02fed1df-7a8d-41ed-8662-17ecda728c06-kube-api-access-5rn8m\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.019047 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02fed1df-7a8d-41ed-8662-17ecda728c06-scripts\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.081085 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"02fed1df-7a8d-41ed-8662-17ecda728c06\") " pod="openstack/glance-default-external-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.192425 4852 util.go:48] "No ready sandbox for pod can be found. 
Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.192425 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.267126 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-httpd-run\") pod \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") "
Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.267214 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-logs\") pod \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") "
Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.267253 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") "
Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.267307 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzzz6\" (UniqueName: \"kubernetes.io/projected/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-kube-api-access-tzzz6\") pod \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") "
Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.267399 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-scripts\") pod \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") "
Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.267615 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-combined-ca-bundle\") pod \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") "
Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.267653 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-config-data\") pod \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") "
Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.267700 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-internal-tls-certs\") pod \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\" (UID: \"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d\") "
Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.268368 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-logs" (OuterVolumeSpecName: "logs") pod "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" (UID: "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d"). InnerVolumeSpecName "logs".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.270630 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" (UID: "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.277679 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qq8bt" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.293877 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" (UID: "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.302019 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-kube-api-access-tzzz6" (OuterVolumeSpecName: "kube-api-access-tzzz6") pod "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" (UID: "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d"). InnerVolumeSpecName "kube-api-access-tzzz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.307772 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-scripts" (OuterVolumeSpecName: "scripts") pod "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" (UID: "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.322966 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-gwfcc" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.370970 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-operator-scripts\") pod \"477f2357-5a45-4725-b0a1-77f6b5a8eeb3\" (UID: \"477f2357-5a45-4725-b0a1-77f6b5a8eeb3\") " Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.371222 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c2cr\" (UniqueName: \"kubernetes.io/projected/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-kube-api-access-9c2cr\") pod \"477f2357-5a45-4725-b0a1-77f6b5a8eeb3\" (UID: \"477f2357-5a45-4725-b0a1-77f6b5a8eeb3\") " Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.371303 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70ae0323-a451-41a9-b76e-899430114d6c-operator-scripts\") pod \"70ae0323-a451-41a9-b76e-899430114d6c\" (UID: \"70ae0323-a451-41a9-b76e-899430114d6c\") " Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.371348 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zhnf\" (UniqueName: \"kubernetes.io/projected/70ae0323-a451-41a9-b76e-899430114d6c-kube-api-access-4zhnf\") pod \"70ae0323-a451-41a9-b76e-899430114d6c\" (UID: \"70ae0323-a451-41a9-b76e-899430114d6c\") " Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.372171 4852 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.372193 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.372223 4852 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.372234 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzzz6\" (UniqueName: \"kubernetes.io/projected/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-kube-api-access-tzzz6\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.372248 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.374144 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70ae0323-a451-41a9-b76e-899430114d6c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "70ae0323-a451-41a9-b76e-899430114d6c" (UID: "70ae0323-a451-41a9-b76e-899430114d6c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.374282 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "477f2357-5a45-4725-b0a1-77f6b5a8eeb3" (UID: "477f2357-5a45-4725-b0a1-77f6b5a8eeb3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.379794 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70ae0323-a451-41a9-b76e-899430114d6c-kube-api-access-4zhnf" (OuterVolumeSpecName: "kube-api-access-4zhnf") pod "70ae0323-a451-41a9-b76e-899430114d6c" (UID: "70ae0323-a451-41a9-b76e-899430114d6c"). InnerVolumeSpecName "kube-api-access-4zhnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.383544 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.388316 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-f3b0-account-create-update-vzdnp" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.391798 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-kube-api-access-9c2cr" (OuterVolumeSpecName: "kube-api-access-9c2cr") pod "477f2357-5a45-4725-b0a1-77f6b5a8eeb3" (UID: "477f2357-5a45-4725-b0a1-77f6b5a8eeb3"). InnerVolumeSpecName "kube-api-access-9c2cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.398744 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" (UID: "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.410585 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d67c2f4-a127-4a7f-bb01-15543416188f" path="/var/lib/kubelet/pods/3d67c2f4-a127-4a7f-bb01-15543416188f/volumes" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.416132 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-config-data" (OuterVolumeSpecName: "config-data") pod "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" (UID: "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.428552 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" (UID: "f1d8a3fc-8cb0-496d-a22d-c590235f0a1d"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.445844 4852 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.489142 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm7bs\" (UniqueName: \"kubernetes.io/projected/31688a6c-099f-466e-b08b-99b67777aadd-kube-api-access-zm7bs\") pod \"31688a6c-099f-466e-b08b-99b67777aadd\" (UID: \"31688a6c-099f-466e-b08b-99b67777aadd\") " Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.489290 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31688a6c-099f-466e-b08b-99b67777aadd-operator-scripts\") pod \"31688a6c-099f-466e-b08b-99b67777aadd\" (UID: \"31688a6c-099f-466e-b08b-99b67777aadd\") " Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.492431 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zhnf\" (UniqueName: \"kubernetes.io/projected/70ae0323-a451-41a9-b76e-899430114d6c-kube-api-access-4zhnf\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.492538 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.492554 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.492568 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.492583 4852 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.492597 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c2cr\" (UniqueName: \"kubernetes.io/projected/477f2357-5a45-4725-b0a1-77f6b5a8eeb3-kube-api-access-9c2cr\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.492612 4852 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.492626 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70ae0323-a451-41a9-b76e-899430114d6c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.493728 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31688a6c-099f-466e-b08b-99b67777aadd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "31688a6c-099f-466e-b08b-99b67777aadd" (UID: "31688a6c-099f-466e-b08b-99b67777aadd"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.501880 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31688a6c-099f-466e-b08b-99b67777aadd-kube-api-access-zm7bs" (OuterVolumeSpecName: "kube-api-access-zm7bs") pod "31688a6c-099f-466e-b08b-99b67777aadd" (UID: "31688a6c-099f-466e-b08b-99b67777aadd"). InnerVolumeSpecName "kube-api-access-zm7bs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.594287 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31688a6c-099f-466e-b08b-99b67777aadd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.594328 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm7bs\" (UniqueName: \"kubernetes.io/projected/31688a6c-099f-466e-b08b-99b67777aadd-kube-api-access-zm7bs\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.637899 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f1d8a3fc-8cb0-496d-a22d-c590235f0a1d","Type":"ContainerDied","Data":"5ae3dc79a4fa06701426e27d69f588ba1083189a3716ab001ee9416f5d89afc2"} Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.637958 4852 scope.go:117] "RemoveContainer" containerID="2f92d9f708bb46fd47a913597b9c5d38fe1e6a397e9b3299a15733b35526b617" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.638144 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.659395 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"87bac28d-2fef-47d3-a061-f32e360398e4","Type":"ContainerStarted","Data":"7997bbdfe314d2b109232c7c0a9860b27b00a48a03396278e78519e963aebb92"} Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.659632 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="ceilometer-central-agent" containerID="cri-o://e7338470449dc36621df47c27e17cbadf2f20104671bc57407a05f5a593a807c" gracePeriod=30 Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.659722 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.659847 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="proxy-httpd" containerID="cri-o://7997bbdfe314d2b109232c7c0a9860b27b00a48a03396278e78519e963aebb92" gracePeriod=30 Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.659971 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="ceilometer-notification-agent" containerID="cri-o://91f40c533416a50425ec8894f5e9a75a21b44a7001ae83206731b1cb30675898" gracePeriod=30 Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.660046 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="sg-core" 
containerID="cri-o://7b71c6200365dfdaa3b5c14cb04fc7b5200024ea32e00826ee39ec07a7cfe580" gracePeriod=30 Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.673574 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-f3b0-account-create-update-vzdnp" event={"ID":"31688a6c-099f-466e-b08b-99b67777aadd","Type":"ContainerDied","Data":"28ba31018339207144ca60330883fd26786f6ba348a04c1df53c21d5c2e28a25"} Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.673620 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-f3b0-account-create-update-vzdnp" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.676468 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28ba31018339207144ca60330883fd26786f6ba348a04c1df53c21d5c2e28a25" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.679982 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-gwfcc" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.680276 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-gwfcc" event={"ID":"477f2357-5a45-4725-b0a1-77f6b5a8eeb3","Type":"ContainerDied","Data":"758a9aa9ff81f31356d76f5286eb3874d65022ddc608fedf7a87c4766d9a2c3d"} Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.680397 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="758a9aa9ff81f31356d76f5286eb3874d65022ddc608fedf7a87c4766d9a2c3d" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.688193 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qq8bt" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.689108 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-qq8bt" event={"ID":"70ae0323-a451-41a9-b76e-899430114d6c","Type":"ContainerDied","Data":"7a29b4c2bb09acaf2eb0dba0244eb95a1a6f0935e1373f5bfc7d928ed268312d"} Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.689885 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a29b4c2bb09acaf2eb0dba0244eb95a1a6f0935e1373f5bfc7d928ed268312d" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.696172 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.980095757 podStartE2EDuration="7.696146313s" podCreationTimestamp="2025-12-01 20:23:53 +0000 UTC" firstStartedPulling="2025-12-01 20:23:54.635020852 +0000 UTC m=+1154.562102289" lastFinishedPulling="2025-12-01 20:23:59.351071428 +0000 UTC m=+1159.278152845" observedRunningTime="2025-12-01 20:24:00.694127169 +0000 UTC m=+1160.621208586" watchObservedRunningTime="2025-12-01 20:24:00.696146313 +0000 UTC m=+1160.623227730" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.749131 4852 scope.go:117] "RemoveContainer" containerID="af740d1b552f0b3e423e9e9856e527cdaa85ef20f2cd30f0c7f236f3fd0c1961" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.788890 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.817861 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.831846 4852 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/glance-default-internal-api-0"] Dec 01 20:24:00 crc kubenswrapper[4852]: E1201 20:24:00.832421 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" containerName="glance-httpd" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.832444 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" containerName="glance-httpd" Dec 01 20:24:00 crc kubenswrapper[4852]: E1201 20:24:00.832469 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70ae0323-a451-41a9-b76e-899430114d6c" containerName="mariadb-database-create" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.832481 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="70ae0323-a451-41a9-b76e-899430114d6c" containerName="mariadb-database-create" Dec 01 20:24:00 crc kubenswrapper[4852]: E1201 20:24:00.832502 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31688a6c-099f-466e-b08b-99b67777aadd" containerName="mariadb-account-create-update" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.832510 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="31688a6c-099f-466e-b08b-99b67777aadd" containerName="mariadb-account-create-update" Dec 01 20:24:00 crc kubenswrapper[4852]: E1201 20:24:00.832538 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477f2357-5a45-4725-b0a1-77f6b5a8eeb3" containerName="mariadb-database-create" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.832544 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="477f2357-5a45-4725-b0a1-77f6b5a8eeb3" containerName="mariadb-database-create" Dec 01 20:24:00 crc kubenswrapper[4852]: E1201 20:24:00.832556 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" containerName="glance-log" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.832564 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" containerName="glance-log" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.832775 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="70ae0323-a451-41a9-b76e-899430114d6c" containerName="mariadb-database-create" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.832808 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="31688a6c-099f-466e-b08b-99b67777aadd" containerName="mariadb-account-create-update" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.832823 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="477f2357-5a45-4725-b0a1-77f6b5a8eeb3" containerName="mariadb-database-create" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.832839 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" containerName="glance-log" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.832848 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" containerName="glance-httpd" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.841342 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.844969 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.845186 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.859274 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.905822 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.907550 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.907717 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.907782 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.907882 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.907972 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.908150 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqrkw\" (UniqueName: \"kubernetes.io/projected/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-kube-api-access-bqrkw\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:00 crc kubenswrapper[4852]: I1201 20:24:00.908174 4852 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.009285 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.009369 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.009399 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.009435 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.009499 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.009579 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqrkw\" (UniqueName: \"kubernetes.io/projected/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-kube-api-access-bqrkw\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.009607 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.009664 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.010219 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.011887 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.013368 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.019332 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.020708 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.022909 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.032909 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.035033 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqrkw\" (UniqueName: \"kubernetes.io/projected/c6cbf2dd-2b08-4fa7-9530-e5835103a6d3-kube-api-access-bqrkw\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.057051 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 20:24:01 crc kubenswrapper[4852]: W1201 20:24:01.073793 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02fed1df_7a8d_41ed_8662_17ecda728c06.slice/crio-1fe0d414b6dd76fcbcac694017f94a043176ec1746212ba62f414b6d77af38dd WatchSource:0}: Error finding container 1fe0d414b6dd76fcbcac694017f94a043176ec1746212ba62f414b6d77af38dd: Status 404 returned error can't find the container with id 
1fe0d414b6dd76fcbcac694017f94a043176ec1746212ba62f414b6d77af38dd Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.098678 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3\") " pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.198955 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-g4rk6" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.249175 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.318381 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjdr9\" (UniqueName: \"kubernetes.io/projected/2757bd33-854b-4876-965d-d77359752edb-kube-api-access-pjdr9\") pod \"2757bd33-854b-4876-965d-d77359752edb\" (UID: \"2757bd33-854b-4876-965d-d77359752edb\") " Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.319265 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2757bd33-854b-4876-965d-d77359752edb-operator-scripts\") pod \"2757bd33-854b-4876-965d-d77359752edb\" (UID: \"2757bd33-854b-4876-965d-d77359752edb\") " Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.320437 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2757bd33-854b-4876-965d-d77359752edb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2757bd33-854b-4876-965d-d77359752edb" (UID: "2757bd33-854b-4876-965d-d77359752edb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.330350 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2757bd33-854b-4876-965d-d77359752edb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.350704 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2757bd33-854b-4876-965d-d77359752edb-kube-api-access-pjdr9" (OuterVolumeSpecName: "kube-api-access-pjdr9") pod "2757bd33-854b-4876-965d-d77359752edb" (UID: "2757bd33-854b-4876-965d-d77359752edb"). InnerVolumeSpecName "kube-api-access-pjdr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.432123 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjdr9\" (UniqueName: \"kubernetes.io/projected/2757bd33-854b-4876-965d-d77359752edb-kube-api-access-pjdr9\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.568814 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.568864 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-63da-account-create-update-2dq9v" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.700974 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" event={"ID":"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb","Type":"ContainerDied","Data":"762be0133183e7b74ab3ed826366dc99bc4e6c55ed3fee2f1206a810a193f786"} Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.701626 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="762be0133183e7b74ab3ed826366dc99bc4e6c55ed3fee2f1206a810a193f786" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.701748 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ace9-account-create-update-t4g7j" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.710830 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-63da-account-create-update-2dq9v" event={"ID":"b0c09ee4-565f-4b6e-b17f-78defa53bde3","Type":"ContainerDied","Data":"79bf6bede45fcfb56c98712262b92722f2183ee76e89fe90ef14500396fe4d2c"} Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.710880 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79bf6bede45fcfb56c98712262b92722f2183ee76e89fe90ef14500396fe4d2c" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.710914 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-63da-account-create-update-2dq9v" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.713043 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-g4rk6" event={"ID":"2757bd33-854b-4876-965d-d77359752edb","Type":"ContainerDied","Data":"9b514d73dbd1005d0230fe7c63283eb57ba383b259c6d7f0d87b1fe3005a358b"} Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.713122 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b514d73dbd1005d0230fe7c63283eb57ba383b259c6d7f0d87b1fe3005a358b" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.713077 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-g4rk6" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.715214 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02fed1df-7a8d-41ed-8662-17ecda728c06","Type":"ContainerStarted","Data":"1fe0d414b6dd76fcbcac694017f94a043176ec1746212ba62f414b6d77af38dd"} Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.721166 4852 generic.go:334] "Generic (PLEG): container finished" podID="87bac28d-2fef-47d3-a061-f32e360398e4" containerID="7997bbdfe314d2b109232c7c0a9860b27b00a48a03396278e78519e963aebb92" exitCode=0 Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.721202 4852 generic.go:334] "Generic (PLEG): container finished" podID="87bac28d-2fef-47d3-a061-f32e360398e4" containerID="7b71c6200365dfdaa3b5c14cb04fc7b5200024ea32e00826ee39ec07a7cfe580" exitCode=2 Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.721209 4852 generic.go:334] "Generic (PLEG): container finished" podID="87bac28d-2fef-47d3-a061-f32e360398e4" containerID="91f40c533416a50425ec8894f5e9a75a21b44a7001ae83206731b1cb30675898" exitCode=0 Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.721233 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"87bac28d-2fef-47d3-a061-f32e360398e4","Type":"ContainerDied","Data":"7997bbdfe314d2b109232c7c0a9860b27b00a48a03396278e78519e963aebb92"} Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.721263 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"87bac28d-2fef-47d3-a061-f32e360398e4","Type":"ContainerDied","Data":"7b71c6200365dfdaa3b5c14cb04fc7b5200024ea32e00826ee39ec07a7cfe580"} Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.721274 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"87bac28d-2fef-47d3-a061-f32e360398e4","Type":"ContainerDied","Data":"91f40c533416a50425ec8894f5e9a75a21b44a7001ae83206731b1cb30675898"} Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.754279 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0c09ee4-565f-4b6e-b17f-78defa53bde3-operator-scripts\") pod \"b0c09ee4-565f-4b6e-b17f-78defa53bde3\" (UID: \"b0c09ee4-565f-4b6e-b17f-78defa53bde3\") " Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.754356 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dx78l\" (UniqueName: \"kubernetes.io/projected/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-kube-api-access-dx78l\") pod \"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb\" (UID: \"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb\") " Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.754646 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6hv4\" (UniqueName: \"kubernetes.io/projected/b0c09ee4-565f-4b6e-b17f-78defa53bde3-kube-api-access-r6hv4\") pod \"b0c09ee4-565f-4b6e-b17f-78defa53bde3\" (UID: \"b0c09ee4-565f-4b6e-b17f-78defa53bde3\") " Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.754965 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-operator-scripts\") pod \"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb\" (UID: \"6546f5f4-5cb1-4cb4-a403-1fc050cd9efb\") " Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.756012 
4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6546f5f4-5cb1-4cb4-a403-1fc050cd9efb" (UID: "6546f5f4-5cb1-4cb4-a403-1fc050cd9efb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.756180 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0c09ee4-565f-4b6e-b17f-78defa53bde3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b0c09ee4-565f-4b6e-b17f-78defa53bde3" (UID: "b0c09ee4-565f-4b6e-b17f-78defa53bde3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.771260 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-kube-api-access-dx78l" (OuterVolumeSpecName: "kube-api-access-dx78l") pod "6546f5f4-5cb1-4cb4-a403-1fc050cd9efb" (UID: "6546f5f4-5cb1-4cb4-a403-1fc050cd9efb"). InnerVolumeSpecName "kube-api-access-dx78l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.771709 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0c09ee4-565f-4b6e-b17f-78defa53bde3-kube-api-access-r6hv4" (OuterVolumeSpecName: "kube-api-access-r6hv4") pod "b0c09ee4-565f-4b6e-b17f-78defa53bde3" (UID: "b0c09ee4-565f-4b6e-b17f-78defa53bde3"). InnerVolumeSpecName "kube-api-access-r6hv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.864623 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.864673 4852 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0c09ee4-565f-4b6e-b17f-78defa53bde3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.864683 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dx78l\" (UniqueName: \"kubernetes.io/projected/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb-kube-api-access-dx78l\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:01 crc kubenswrapper[4852]: I1201 20:24:01.864695 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6hv4\" (UniqueName: \"kubernetes.io/projected/b0c09ee4-565f-4b6e-b17f-78defa53bde3-kube-api-access-r6hv4\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:02 crc kubenswrapper[4852]: I1201 20:24:02.208013 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 20:24:02 crc kubenswrapper[4852]: I1201 20:24:02.338186 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1d8a3fc-8cb0-496d-a22d-c590235f0a1d" path="/var/lib/kubelet/pods/f1d8a3fc-8cb0-496d-a22d-c590235f0a1d/volumes" Dec 01 20:24:02 crc kubenswrapper[4852]: I1201 20:24:02.746663 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"02fed1df-7a8d-41ed-8662-17ecda728c06","Type":"ContainerStarted","Data":"6b5b83536a5ec72391b80cb762aa56f19534917de4307c0766adf4d43fa8ae23"} Dec 01 20:24:02 crc kubenswrapper[4852]: I1201 20:24:02.746741 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02fed1df-7a8d-41ed-8662-17ecda728c06","Type":"ContainerStarted","Data":"8d5d0fa5b53fc17d254c64e12eca883f0b368a5e63513b1701ae324627ef6703"} Dec 01 20:24:02 crc kubenswrapper[4852]: I1201 20:24:02.749747 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3","Type":"ContainerStarted","Data":"7b72ffee5c699b0c3910c9c55a244cfb071d1c9af8c6b8403f3b65fc4dee2580"} Dec 01 20:24:02 crc kubenswrapper[4852]: I1201 20:24:02.790671 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.790639633 podStartE2EDuration="3.790639633s" podCreationTimestamp="2025-12-01 20:23:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:24:02.77270496 +0000 UTC m=+1162.699786397" watchObservedRunningTime="2025-12-01 20:24:02.790639633 +0000 UTC m=+1162.717721060" Dec 01 20:24:03 crc kubenswrapper[4852]: I1201 20:24:03.771118 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3","Type":"ContainerStarted","Data":"3b5c553387bc80e7c19301574444c61421e2a26e3ee7bcec301f3d34c6168f7a"} Dec 01 20:24:03 crc kubenswrapper[4852]: I1201 20:24:03.782273 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 01 20:24:04 crc kubenswrapper[4852]: I1201 20:24:04.242524 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-d7844c8bb-bfdj8" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 01 20:24:04 crc kubenswrapper[4852]: I1201 20:24:04.242688 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:24:04 crc kubenswrapper[4852]: I1201 20:24:04.787805 4852 generic.go:334] "Generic (PLEG): container finished" podID="87bac28d-2fef-47d3-a061-f32e360398e4" containerID="e7338470449dc36621df47c27e17cbadf2f20104671bc57407a05f5a593a807c" exitCode=0 Dec 01 20:24:04 crc kubenswrapper[4852]: I1201 20:24:04.787875 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"87bac28d-2fef-47d3-a061-f32e360398e4","Type":"ContainerDied","Data":"e7338470449dc36621df47c27e17cbadf2f20104671bc57407a05f5a593a807c"} Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.264067 4852 util.go:48] "No ready sandbox for pod can be found. 
Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.264067 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.343676 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-scripts\") pod \"87bac28d-2fef-47d3-a061-f32e360398e4\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") "
Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.343754 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-combined-ca-bundle\") pod \"87bac28d-2fef-47d3-a061-f32e360398e4\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") "
Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.343937 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-log-httpd\") pod \"87bac28d-2fef-47d3-a061-f32e360398e4\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") "
Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.344051 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-sg-core-conf-yaml\") pod \"87bac28d-2fef-47d3-a061-f32e360398e4\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") "
Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.344151 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-run-httpd\") pod \"87bac28d-2fef-47d3-a061-f32e360398e4\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") "
Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.344182 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgrz5\" (UniqueName: \"kubernetes.io/projected/87bac28d-2fef-47d3-a061-f32e360398e4-kube-api-access-jgrz5\") pod \"87bac28d-2fef-47d3-a061-f32e360398e4\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") "
Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.344212 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-ceilometer-tls-certs\") pod \"87bac28d-2fef-47d3-a061-f32e360398e4\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") "
Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.344227 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-config-data\") pod \"87bac28d-2fef-47d3-a061-f32e360398e4\" (UID: \"87bac28d-2fef-47d3-a061-f32e360398e4\") "
Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.345910 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "87bac28d-2fef-47d3-a061-f32e360398e4" (UID: "87bac28d-2fef-47d3-a061-f32e360398e4"). InnerVolumeSpecName "log-httpd".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.346110 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "87bac28d-2fef-47d3-a061-f32e360398e4" (UID: "87bac28d-2fef-47d3-a061-f32e360398e4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.352180 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-scripts" (OuterVolumeSpecName: "scripts") pod "87bac28d-2fef-47d3-a061-f32e360398e4" (UID: "87bac28d-2fef-47d3-a061-f32e360398e4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.354054 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87bac28d-2fef-47d3-a061-f32e360398e4-kube-api-access-jgrz5" (OuterVolumeSpecName: "kube-api-access-jgrz5") pod "87bac28d-2fef-47d3-a061-f32e360398e4" (UID: "87bac28d-2fef-47d3-a061-f32e360398e4"). InnerVolumeSpecName "kube-api-access-jgrz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.426352 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "87bac28d-2fef-47d3-a061-f32e360398e4" (UID: "87bac28d-2fef-47d3-a061-f32e360398e4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.447144 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.447204 4852 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.447223 4852 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.447236 4852 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/87bac28d-2fef-47d3-a061-f32e360398e4-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.447252 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgrz5\" (UniqueName: \"kubernetes.io/projected/87bac28d-2fef-47d3-a061-f32e360398e4-kube-api-access-jgrz5\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.452911 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "87bac28d-2fef-47d3-a061-f32e360398e4" (UID: "87bac28d-2fef-47d3-a061-f32e360398e4"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.480906 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "87bac28d-2fef-47d3-a061-f32e360398e4" (UID: "87bac28d-2fef-47d3-a061-f32e360398e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.490442 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-config-data" (OuterVolumeSpecName: "config-data") pod "87bac28d-2fef-47d3-a061-f32e360398e4" (UID: "87bac28d-2fef-47d3-a061-f32e360398e4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.549769 4852 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.549817 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.549829 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87bac28d-2fef-47d3-a061-f32e360398e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.802200 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"87bac28d-2fef-47d3-a061-f32e360398e4","Type":"ContainerDied","Data":"654706ff3891854b5a66dd8d4d4290e27da175ba0ce413fa30476097808764d7"} Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.802286 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.802323 4852 scope.go:117] "RemoveContainer" containerID="7997bbdfe314d2b109232c7c0a9860b27b00a48a03396278e78519e963aebb92" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.808178 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6cbf2dd-2b08-4fa7-9530-e5835103a6d3","Type":"ContainerStarted","Data":"7436a366b4bbe6172e754bdc01e5384659a50370148e273ed60fa61a6f10fb73"} Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.829121 4852 scope.go:117] "RemoveContainer" containerID="7b71c6200365dfdaa3b5c14cb04fc7b5200024ea32e00826ee39ec07a7cfe580" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.838836 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.838810802 podStartE2EDuration="5.838810802s" podCreationTimestamp="2025-12-01 20:24:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:24:05.835182178 +0000 UTC m=+1165.762263615" watchObservedRunningTime="2025-12-01 20:24:05.838810802 +0000 UTC m=+1165.765892219" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.865777 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.866096 4852 scope.go:117] "RemoveContainer" containerID="91f40c533416a50425ec8894f5e9a75a21b44a7001ae83206731b1cb30675898" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.882625 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.893378 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:05 crc kubenswrapper[4852]: E1201 20:24:05.893929 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0c09ee4-565f-4b6e-b17f-78defa53bde3" containerName="mariadb-account-create-update" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.893959 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0c09ee4-565f-4b6e-b17f-78defa53bde3" containerName="mariadb-account-create-update" Dec 01 20:24:05 crc kubenswrapper[4852]: E1201 20:24:05.893970 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="proxy-httpd" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.893978 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="proxy-httpd" Dec 01 20:24:05 crc kubenswrapper[4852]: E1201 20:24:05.893993 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6546f5f4-5cb1-4cb4-a403-1fc050cd9efb" containerName="mariadb-account-create-update" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894002 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="6546f5f4-5cb1-4cb4-a403-1fc050cd9efb" containerName="mariadb-account-create-update" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894006 4852 scope.go:117] "RemoveContainer" containerID="e7338470449dc36621df47c27e17cbadf2f20104671bc57407a05f5a593a807c" Dec 01 20:24:05 crc kubenswrapper[4852]: E1201 20:24:05.894030 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2757bd33-854b-4876-965d-d77359752edb" containerName="mariadb-database-create" Dec 
01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894039 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="2757bd33-854b-4876-965d-d77359752edb" containerName="mariadb-database-create" Dec 01 20:24:05 crc kubenswrapper[4852]: E1201 20:24:05.894054 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="ceilometer-central-agent" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894061 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="ceilometer-central-agent" Dec 01 20:24:05 crc kubenswrapper[4852]: E1201 20:24:05.894074 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="sg-core" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894081 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="sg-core" Dec 01 20:24:05 crc kubenswrapper[4852]: E1201 20:24:05.894103 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="ceilometer-notification-agent" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894111 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="ceilometer-notification-agent" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894339 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="ceilometer-notification-agent" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894362 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="ceilometer-central-agent" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894372 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="proxy-httpd" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894382 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="2757bd33-854b-4876-965d-d77359752edb" containerName="mariadb-database-create" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894398 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="6546f5f4-5cb1-4cb4-a403-1fc050cd9efb" containerName="mariadb-account-create-update" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894409 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" containerName="sg-core" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.894427 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0c09ee4-565f-4b6e-b17f-78defa53bde3" containerName="mariadb-account-create-update" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.898098 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.906288 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.906467 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.906963 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.922792 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.958903 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-log-httpd\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.958949 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.959061 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-run-httpd\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.959112 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.959161 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-scripts\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.959201 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-config-data\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.959226 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:05 crc kubenswrapper[4852]: I1201 20:24:05.959284 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfzq6\" (UniqueName: 
\"kubernetes.io/projected/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-kube-api-access-dfzq6\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.061273 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-log-httpd\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.061358 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.061482 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-run-httpd\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.061568 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.061622 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-scripts\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.061682 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-config-data\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.061718 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.061811 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfzq6\" (UniqueName: \"kubernetes.io/projected/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-kube-api-access-dfzq6\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.061962 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-log-httpd\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.062649 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-run-httpd\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.069605 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.069710 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-scripts\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.070181 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-config-data\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.070268 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.070789 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.081028 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfzq6\" (UniqueName: \"kubernetes.io/projected/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-kube-api-access-dfzq6\") pod \"ceilometer-0\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.226780 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.341298 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87bac28d-2fef-47d3-a061-f32e360398e4" path="/var/lib/kubelet/pods/87bac28d-2fef-47d3-a061-f32e360398e4/volumes" Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.732143 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:06 crc kubenswrapper[4852]: I1201 20:24:06.821464 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6","Type":"ContainerStarted","Data":"e5052dc193d0474e0327c75b7811bb2f60c1d56507403bec212d8632eff2e90e"} Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.473155 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lvtxh"] Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.474416 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.476908 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.477209 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-c5w7b" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.477249 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.489391 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lvtxh"] Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.598627 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwk7l\" (UniqueName: \"kubernetes.io/projected/049d7d92-2f9a-4d85-af6d-46a56c4f4072-kube-api-access-pwk7l\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.599115 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.599201 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-scripts\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.599224 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-config-data\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.701057 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwk7l\" (UniqueName: \"kubernetes.io/projected/049d7d92-2f9a-4d85-af6d-46a56c4f4072-kube-api-access-pwk7l\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.701144 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.701256 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-scripts\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: 
\"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.701281 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-config-data\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.716431 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-scripts\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.716510 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.720628 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwk7l\" (UniqueName: \"kubernetes.io/projected/049d7d92-2f9a-4d85-af6d-46a56c4f4072-kube-api-access-pwk7l\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.727601 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-config-data\") pod \"nova-cell0-conductor-db-sync-lvtxh\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:07 crc kubenswrapper[4852]: I1201 20:24:07.798511 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:08 crc kubenswrapper[4852]: I1201 20:24:08.154787 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:08 crc kubenswrapper[4852]: W1201 20:24:08.296070 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod049d7d92_2f9a_4d85_af6d_46a56c4f4072.slice/crio-1ac51f969fc2704cbe9f05d5c7370c04eae4ec4a1696203b80206dc329bfc838 WatchSource:0}: Error finding container 1ac51f969fc2704cbe9f05d5c7370c04eae4ec4a1696203b80206dc329bfc838: Status 404 returned error can't find the container with id 1ac51f969fc2704cbe9f05d5c7370c04eae4ec4a1696203b80206dc329bfc838 Dec 01 20:24:08 crc kubenswrapper[4852]: I1201 20:24:08.301352 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lvtxh"] Dec 01 20:24:08 crc kubenswrapper[4852]: I1201 20:24:08.887035 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lvtxh" event={"ID":"049d7d92-2f9a-4d85-af6d-46a56c4f4072","Type":"ContainerStarted","Data":"1ac51f969fc2704cbe9f05d5c7370c04eae4ec4a1696203b80206dc329bfc838"} Dec 01 20:24:08 crc kubenswrapper[4852]: I1201 20:24:08.890572 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6","Type":"ContainerStarted","Data":"df171e6518feb8d34de86877956889076d0c30058897307435849b5697e55e29"} Dec 01 20:24:08 crc kubenswrapper[4852]: I1201 20:24:08.895527 4852 generic.go:334] "Generic (PLEG): container finished" podID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerID="93647e0225eee6336edcf0baca3971246c9732cc76340cbfb6ab71e6e1227f4e" exitCode=137 Dec 01 20:24:08 crc kubenswrapper[4852]: I1201 20:24:08.895588 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d7844c8bb-bfdj8" event={"ID":"734ce01c-357e-438b-bfe6-39fa2044dc13","Type":"ContainerDied","Data":"93647e0225eee6336edcf0baca3971246c9732cc76340cbfb6ab71e6e1227f4e"} Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.061524 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.135792 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-scripts\") pod \"734ce01c-357e-438b-bfe6-39fa2044dc13\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.135883 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-combined-ca-bundle\") pod \"734ce01c-357e-438b-bfe6-39fa2044dc13\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.135998 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-secret-key\") pod \"734ce01c-357e-438b-bfe6-39fa2044dc13\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.136043 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/734ce01c-357e-438b-bfe6-39fa2044dc13-logs\") pod \"734ce01c-357e-438b-bfe6-39fa2044dc13\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.136094 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-config-data\") pod \"734ce01c-357e-438b-bfe6-39fa2044dc13\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.136120 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktzcv\" (UniqueName: \"kubernetes.io/projected/734ce01c-357e-438b-bfe6-39fa2044dc13-kube-api-access-ktzcv\") pod \"734ce01c-357e-438b-bfe6-39fa2044dc13\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.136157 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-tls-certs\") pod \"734ce01c-357e-438b-bfe6-39fa2044dc13\" (UID: \"734ce01c-357e-438b-bfe6-39fa2044dc13\") " Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.137749 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/734ce01c-357e-438b-bfe6-39fa2044dc13-logs" (OuterVolumeSpecName: "logs") pod "734ce01c-357e-438b-bfe6-39fa2044dc13" (UID: "734ce01c-357e-438b-bfe6-39fa2044dc13"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.141037 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "734ce01c-357e-438b-bfe6-39fa2044dc13" (UID: "734ce01c-357e-438b-bfe6-39fa2044dc13"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.141693 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/734ce01c-357e-438b-bfe6-39fa2044dc13-kube-api-access-ktzcv" (OuterVolumeSpecName: "kube-api-access-ktzcv") pod "734ce01c-357e-438b-bfe6-39fa2044dc13" (UID: "734ce01c-357e-438b-bfe6-39fa2044dc13"). InnerVolumeSpecName "kube-api-access-ktzcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.170821 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-config-data" (OuterVolumeSpecName: "config-data") pod "734ce01c-357e-438b-bfe6-39fa2044dc13" (UID: "734ce01c-357e-438b-bfe6-39fa2044dc13"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.175155 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "734ce01c-357e-438b-bfe6-39fa2044dc13" (UID: "734ce01c-357e-438b-bfe6-39fa2044dc13"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.175472 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-scripts" (OuterVolumeSpecName: "scripts") pod "734ce01c-357e-438b-bfe6-39fa2044dc13" (UID: "734ce01c-357e-438b-bfe6-39fa2044dc13"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.201272 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "734ce01c-357e-438b-bfe6-39fa2044dc13" (UID: "734ce01c-357e-438b-bfe6-39fa2044dc13"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.239028 4852 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.239072 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/734ce01c-357e-438b-bfe6-39fa2044dc13-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.239083 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.239092 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktzcv\" (UniqueName: \"kubernetes.io/projected/734ce01c-357e-438b-bfe6-39fa2044dc13-kube-api-access-ktzcv\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.239104 4852 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.239114 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/734ce01c-357e-438b-bfe6-39fa2044dc13-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.239122 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/734ce01c-357e-438b-bfe6-39fa2044dc13-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.909134 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d7844c8bb-bfdj8" event={"ID":"734ce01c-357e-438b-bfe6-39fa2044dc13","Type":"ContainerDied","Data":"3961654c8c5f939d8b0f779cf13ad5d3ce347e75c2f6cab4b671166bd0e4f8e1"} Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.909189 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-d7844c8bb-bfdj8" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.909201 4852 scope.go:117] "RemoveContainer" containerID="65ae73baf982279b1029ae39d9022724da41a50922efb382d9bb761c1c4b1753" Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.915855 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6","Type":"ContainerStarted","Data":"ca959af83340712fbf41b28de6ba7a4d045894a48ba7623766805d56074d7f88"} Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.951860 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-d7844c8bb-bfdj8"] Dec 01 20:24:09 crc kubenswrapper[4852]: I1201 20:24:09.963788 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-d7844c8bb-bfdj8"] Dec 01 20:24:10 crc kubenswrapper[4852]: I1201 20:24:10.111842 4852 scope.go:117] "RemoveContainer" containerID="93647e0225eee6336edcf0baca3971246c9732cc76340cbfb6ab71e6e1227f4e" Dec 01 20:24:10 crc kubenswrapper[4852]: I1201 20:24:10.362133 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" path="/var/lib/kubelet/pods/734ce01c-357e-438b-bfe6-39fa2044dc13/volumes" Dec 01 20:24:10 crc kubenswrapper[4852]: I1201 20:24:10.385347 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 20:24:10 crc kubenswrapper[4852]: I1201 20:24:10.385394 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 20:24:10 crc kubenswrapper[4852]: I1201 20:24:10.434930 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 20:24:10 crc kubenswrapper[4852]: I1201 20:24:10.451139 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 20:24:10 crc kubenswrapper[4852]: I1201 20:24:10.932619 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6","Type":"ContainerStarted","Data":"b4f658daeb6b28e37742483ae8cfe565ccc3ae225eff3d52e2d0e8bd12f6a53c"} Dec 01 20:24:10 crc kubenswrapper[4852]: I1201 20:24:10.933116 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 20:24:10 crc kubenswrapper[4852]: I1201 20:24:10.933133 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.251925 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.252006 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.285664 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.298595 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.956294 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6","Type":"ContainerStarted","Data":"c27f181b756b5f906505480cc2f2c95a404f79b7fa7c1572ff8ece45cd0dac5e"} Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.957037 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="ceilometer-central-agent" containerID="cri-o://df171e6518feb8d34de86877956889076d0c30058897307435849b5697e55e29" gracePeriod=30 Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.957154 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="sg-core" containerID="cri-o://b4f658daeb6b28e37742483ae8cfe565ccc3ae225eff3d52e2d0e8bd12f6a53c" gracePeriod=30 Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.957186 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="ceilometer-notification-agent" containerID="cri-o://ca959af83340712fbf41b28de6ba7a4d045894a48ba7623766805d56074d7f88" gracePeriod=30 Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.957100 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.957081 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="proxy-httpd" containerID="cri-o://c27f181b756b5f906505480cc2f2c95a404f79b7fa7c1572ff8ece45cd0dac5e" gracePeriod=30 Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.957289 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:11 crc kubenswrapper[4852]: I1201 20:24:11.991389 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.2448920709999998 podStartE2EDuration="6.991366384s" podCreationTimestamp="2025-12-01 20:24:05 +0000 UTC" firstStartedPulling="2025-12-01 20:24:06.74092232 +0000 UTC m=+1166.668003737" lastFinishedPulling="2025-12-01 20:24:11.487396633 +0000 UTC m=+1171.414478050" observedRunningTime="2025-12-01 20:24:11.988212024 +0000 UTC m=+1171.915293441" watchObservedRunningTime="2025-12-01 20:24:11.991366384 +0000 UTC m=+1171.918447801" Dec 01 20:24:12 crc kubenswrapper[4852]: I1201 20:24:12.978969 4852 generic.go:334] "Generic (PLEG): container finished" podID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerID="c27f181b756b5f906505480cc2f2c95a404f79b7fa7c1572ff8ece45cd0dac5e" exitCode=0 Dec 01 20:24:12 crc kubenswrapper[4852]: I1201 20:24:12.979327 4852 generic.go:334] "Generic (PLEG): container finished" podID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerID="b4f658daeb6b28e37742483ae8cfe565ccc3ae225eff3d52e2d0e8bd12f6a53c" exitCode=2 Dec 01 20:24:12 crc kubenswrapper[4852]: I1201 20:24:12.979336 4852 generic.go:334] "Generic (PLEG): container finished" podID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerID="ca959af83340712fbf41b28de6ba7a4d045894a48ba7623766805d56074d7f88" exitCode=0 Dec 01 20:24:12 crc kubenswrapper[4852]: I1201 20:24:12.979407 4852 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 20:24:12 crc kubenswrapper[4852]: I1201 20:24:12.979418 4852 prober_manager.go:312] 
"Failed to trigger a manual run" probe="Readiness" Dec 01 20:24:12 crc kubenswrapper[4852]: I1201 20:24:12.979032 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6","Type":"ContainerDied","Data":"c27f181b756b5f906505480cc2f2c95a404f79b7fa7c1572ff8ece45cd0dac5e"} Dec 01 20:24:12 crc kubenswrapper[4852]: I1201 20:24:12.979548 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6","Type":"ContainerDied","Data":"b4f658daeb6b28e37742483ae8cfe565ccc3ae225eff3d52e2d0e8bd12f6a53c"} Dec 01 20:24:12 crc kubenswrapper[4852]: I1201 20:24:12.979592 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6","Type":"ContainerDied","Data":"ca959af83340712fbf41b28de6ba7a4d045894a48ba7623766805d56074d7f88"} Dec 01 20:24:13 crc kubenswrapper[4852]: I1201 20:24:13.137976 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 20:24:13 crc kubenswrapper[4852]: I1201 20:24:13.141294 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 20:24:14 crc kubenswrapper[4852]: I1201 20:24:14.162428 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:14 crc kubenswrapper[4852]: I1201 20:24:14.162607 4852 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 20:24:14 crc kubenswrapper[4852]: I1201 20:24:14.199376 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 20:24:15 crc kubenswrapper[4852]: I1201 20:24:15.008111 4852 generic.go:334] "Generic (PLEG): container finished" podID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerID="df171e6518feb8d34de86877956889076d0c30058897307435849b5697e55e29" exitCode=0 Dec 01 20:24:15 crc kubenswrapper[4852]: I1201 20:24:15.008185 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6","Type":"ContainerDied","Data":"df171e6518feb8d34de86877956889076d0c30058897307435849b5697e55e29"} Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.060468 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.122259 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-ceilometer-tls-certs\") pod \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.122382 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-scripts\") pod \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.122419 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-sg-core-conf-yaml\") pod \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.122577 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-log-httpd\") pod \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.122619 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-config-data\") pod \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.122731 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfzq6\" (UniqueName: \"kubernetes.io/projected/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-kube-api-access-dfzq6\") pod \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.123351 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-run-httpd\") pod \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.123428 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-combined-ca-bundle\") pod \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\" (UID: \"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6\") " Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.123527 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" (UID: "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.123842 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" (UID: "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.124818 4852 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.124839 4852 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.127903 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-kube-api-access-dfzq6" (OuterVolumeSpecName: "kube-api-access-dfzq6") pod "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" (UID: "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6"). InnerVolumeSpecName "kube-api-access-dfzq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.128219 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-scripts" (OuterVolumeSpecName: "scripts") pod "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" (UID: "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.151962 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" (UID: "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.189474 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" (UID: "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.197267 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" (UID: "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.227538 4852 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.228088 4852 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.228104 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.228118 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfzq6\" (UniqueName: \"kubernetes.io/projected/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-kube-api-access-dfzq6\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.228133 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.235734 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-config-data" (OuterVolumeSpecName: "config-data") pod "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" (UID: "0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:17 crc kubenswrapper[4852]: I1201 20:24:17.330367 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.050964 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6","Type":"ContainerDied","Data":"e5052dc193d0474e0327c75b7811bb2f60c1d56507403bec212d8632eff2e90e"} Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.051072 4852 scope.go:117] "RemoveContainer" containerID="c27f181b756b5f906505480cc2f2c95a404f79b7fa7c1572ff8ece45cd0dac5e" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.051561 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.055838 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lvtxh" event={"ID":"049d7d92-2f9a-4d85-af6d-46a56c4f4072","Type":"ContainerStarted","Data":"edea85c8accdf25bbf9241b3ea48036e1a4317e34ef3a8289e54384b72cad418"} Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.081859 4852 scope.go:117] "RemoveContainer" containerID="b4f658daeb6b28e37742483ae8cfe565ccc3ae225eff3d52e2d0e8bd12f6a53c" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.098889 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-lvtxh" podStartSLOduration=2.517565446 podStartE2EDuration="11.098859501s" podCreationTimestamp="2025-12-01 20:24:07 +0000 UTC" firstStartedPulling="2025-12-01 20:24:08.299177078 +0000 UTC m=+1168.226258515" lastFinishedPulling="2025-12-01 20:24:16.880471153 +0000 UTC m=+1176.807552570" observedRunningTime="2025-12-01 20:24:18.079311667 +0000 UTC m=+1178.006393175" watchObservedRunningTime="2025-12-01 20:24:18.098859501 +0000 UTC m=+1178.025940918" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.112582 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.113842 4852 scope.go:117] "RemoveContainer" containerID="ca959af83340712fbf41b28de6ba7a4d045894a48ba7623766805d56074d7f88" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.141516 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.147784 4852 scope.go:117] "RemoveContainer" containerID="df171e6518feb8d34de86877956889076d0c30058897307435849b5697e55e29" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.158227 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:18 crc kubenswrapper[4852]: E1201 20:24:18.158904 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="ceilometer-notification-agent" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.158928 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="ceilometer-notification-agent" Dec 01 20:24:18 crc kubenswrapper[4852]: E1201 20:24:18.158951 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.158959 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon" Dec 01 20:24:18 crc kubenswrapper[4852]: E1201 20:24:18.158983 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="proxy-httpd" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.158991 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="proxy-httpd" Dec 01 20:24:18 crc kubenswrapper[4852]: E1201 20:24:18.159017 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon-log" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.159026 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" 
containerName="horizon-log" Dec 01 20:24:18 crc kubenswrapper[4852]: E1201 20:24:18.159043 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="sg-core" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.159051 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="sg-core" Dec 01 20:24:18 crc kubenswrapper[4852]: E1201 20:24:18.159072 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="ceilometer-central-agent" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.159081 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="ceilometer-central-agent" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.159311 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.159338 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="proxy-httpd" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.159350 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="734ce01c-357e-438b-bfe6-39fa2044dc13" containerName="horizon-log" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.159362 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="ceilometer-central-agent" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.159375 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="ceilometer-notification-agent" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.159390 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" containerName="sg-core" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.161780 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.165830 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.166901 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.167258 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.167509 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.251822 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-scripts\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.252179 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-config-data\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.252340 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.252469 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.252608 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.252713 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-log-httpd\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.252912 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfpf6\" (UniqueName: \"kubernetes.io/projected/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-kube-api-access-vfpf6\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.253013 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-run-httpd\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.336779 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6" path="/var/lib/kubelet/pods/0ca7a9c7-5d51-447c-8983-1af7ef0ae0e6/volumes" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.354959 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-scripts\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.355055 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-config-data\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.355119 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.355151 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.355184 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.355208 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-log-httpd\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.355295 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfpf6\" (UniqueName: \"kubernetes.io/projected/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-kube-api-access-vfpf6\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.355319 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-run-httpd\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.356190 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-run-httpd\") pod \"ceilometer-0\" (UID: 
\"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.359007 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-log-httpd\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.363634 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.364435 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.371800 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-scripts\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.376514 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.384783 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-config-data\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.388096 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfpf6\" (UniqueName: \"kubernetes.io/projected/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-kube-api-access-vfpf6\") pod \"ceilometer-0\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.480798 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:18 crc kubenswrapper[4852]: I1201 20:24:18.983038 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:18 crc kubenswrapper[4852]: W1201 20:24:18.998933 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e6f204f_a06f_4c0e_aa8b_4fd57b568d79.slice/crio-eb782267f91a5d2f6d7d7c8e8503fe81b152bba44f7cf038349acb228ea3b64c WatchSource:0}: Error finding container eb782267f91a5d2f6d7d7c8e8503fe81b152bba44f7cf038349acb228ea3b64c: Status 404 returned error can't find the container with id eb782267f91a5d2f6d7d7c8e8503fe81b152bba44f7cf038349acb228ea3b64c Dec 01 20:24:19 crc kubenswrapper[4852]: I1201 20:24:19.069440 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79","Type":"ContainerStarted","Data":"eb782267f91a5d2f6d7d7c8e8503fe81b152bba44f7cf038349acb228ea3b64c"} Dec 01 20:24:20 crc kubenswrapper[4852]: I1201 20:24:20.103561 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79","Type":"ContainerStarted","Data":"45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf"} Dec 01 20:24:22 crc kubenswrapper[4852]: I1201 20:24:22.130369 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79","Type":"ContainerStarted","Data":"c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996"} Dec 01 20:24:23 crc kubenswrapper[4852]: I1201 20:24:23.147344 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79","Type":"ContainerStarted","Data":"dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481"} Dec 01 20:24:26 crc kubenswrapper[4852]: I1201 20:24:26.183854 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79","Type":"ContainerStarted","Data":"90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f"} Dec 01 20:24:26 crc kubenswrapper[4852]: I1201 20:24:26.184730 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 20:24:26 crc kubenswrapper[4852]: I1201 20:24:26.212797 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.253866932 podStartE2EDuration="8.212774914s" podCreationTimestamp="2025-12-01 20:24:18 +0000 UTC" firstStartedPulling="2025-12-01 20:24:19.00412918 +0000 UTC m=+1178.931210617" lastFinishedPulling="2025-12-01 20:24:24.963037182 +0000 UTC m=+1184.890118599" observedRunningTime="2025-12-01 20:24:26.21041452 +0000 UTC m=+1186.137495967" watchObservedRunningTime="2025-12-01 20:24:26.212774914 +0000 UTC m=+1186.139856341" Dec 01 20:24:26 crc kubenswrapper[4852]: I1201 20:24:26.704591 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:28 crc kubenswrapper[4852]: I1201 20:24:28.207176 4852 generic.go:334] "Generic (PLEG): container finished" podID="049d7d92-2f9a-4d85-af6d-46a56c4f4072" containerID="edea85c8accdf25bbf9241b3ea48036e1a4317e34ef3a8289e54384b72cad418" exitCode=0 Dec 01 20:24:28 crc kubenswrapper[4852]: I1201 20:24:28.208766 4852 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="ceilometer-central-agent" containerID="cri-o://45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf" gracePeriod=30 Dec 01 20:24:28 crc kubenswrapper[4852]: I1201 20:24:28.209149 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lvtxh" event={"ID":"049d7d92-2f9a-4d85-af6d-46a56c4f4072","Type":"ContainerDied","Data":"edea85c8accdf25bbf9241b3ea48036e1a4317e34ef3a8289e54384b72cad418"} Dec 01 20:24:28 crc kubenswrapper[4852]: I1201 20:24:28.209739 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="proxy-httpd" containerID="cri-o://90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f" gracePeriod=30 Dec 01 20:24:28 crc kubenswrapper[4852]: I1201 20:24:28.209859 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="sg-core" containerID="cri-o://dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481" gracePeriod=30 Dec 01 20:24:28 crc kubenswrapper[4852]: I1201 20:24:28.209955 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="ceilometer-notification-agent" containerID="cri-o://c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996" gracePeriod=30 Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.232716 4852 generic.go:334] "Generic (PLEG): container finished" podID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerID="90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f" exitCode=0 Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.232760 4852 generic.go:334] "Generic (PLEG): container finished" podID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerID="dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481" exitCode=2 Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.232777 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79","Type":"ContainerDied","Data":"90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f"} Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.232825 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79","Type":"ContainerDied","Data":"dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481"} Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.666431 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.719369 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-scripts\") pod \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.719553 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-config-data\") pod \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.719611 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-combined-ca-bundle\") pod \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.719646 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwk7l\" (UniqueName: \"kubernetes.io/projected/049d7d92-2f9a-4d85-af6d-46a56c4f4072-kube-api-access-pwk7l\") pod \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\" (UID: \"049d7d92-2f9a-4d85-af6d-46a56c4f4072\") " Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.727718 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/049d7d92-2f9a-4d85-af6d-46a56c4f4072-kube-api-access-pwk7l" (OuterVolumeSpecName: "kube-api-access-pwk7l") pod "049d7d92-2f9a-4d85-af6d-46a56c4f4072" (UID: "049d7d92-2f9a-4d85-af6d-46a56c4f4072"). InnerVolumeSpecName "kube-api-access-pwk7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.747112 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-scripts" (OuterVolumeSpecName: "scripts") pod "049d7d92-2f9a-4d85-af6d-46a56c4f4072" (UID: "049d7d92-2f9a-4d85-af6d-46a56c4f4072"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.769680 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-config-data" (OuterVolumeSpecName: "config-data") pod "049d7d92-2f9a-4d85-af6d-46a56c4f4072" (UID: "049d7d92-2f9a-4d85-af6d-46a56c4f4072"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.772347 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "049d7d92-2f9a-4d85-af6d-46a56c4f4072" (UID: "049d7d92-2f9a-4d85-af6d-46a56c4f4072"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.822092 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.822854 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.822923 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049d7d92-2f9a-4d85-af6d-46a56c4f4072-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:29 crc kubenswrapper[4852]: I1201 20:24:29.822981 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwk7l\" (UniqueName: \"kubernetes.io/projected/049d7d92-2f9a-4d85-af6d-46a56c4f4072-kube-api-access-pwk7l\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.096351 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.128483 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-config-data\") pod \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.128748 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfpf6\" (UniqueName: \"kubernetes.io/projected/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-kube-api-access-vfpf6\") pod \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.128928 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-run-httpd\") pod \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.128968 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-ceilometer-tls-certs\") pod \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.129010 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-scripts\") pod \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.129044 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-combined-ca-bundle\") pod \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.129124 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-sg-core-conf-yaml\") pod \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.129174 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-log-httpd\") pod \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\" (UID: \"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79\") " Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.130256 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" (UID: "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.131045 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" (UID: "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.136287 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-kube-api-access-vfpf6" (OuterVolumeSpecName: "kube-api-access-vfpf6") pod "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" (UID: "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79"). InnerVolumeSpecName "kube-api-access-vfpf6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.136935 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-scripts" (OuterVolumeSpecName: "scripts") pod "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" (UID: "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.156606 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" (UID: "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.199891 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" (UID: "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.229659 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" (UID: "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.231543 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfpf6\" (UniqueName: \"kubernetes.io/projected/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-kube-api-access-vfpf6\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.231584 4852 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.231606 4852 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.231625 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.231644 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.231660 4852 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.231676 4852 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.241071 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-config-data" (OuterVolumeSpecName: "config-data") pod "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" (UID: "0e6f204f-a06f-4c0e-aa8b-4fd57b568d79"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.248553 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lvtxh" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.248536 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lvtxh" event={"ID":"049d7d92-2f9a-4d85-af6d-46a56c4f4072","Type":"ContainerDied","Data":"1ac51f969fc2704cbe9f05d5c7370c04eae4ec4a1696203b80206dc329bfc838"} Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.248714 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ac51f969fc2704cbe9f05d5c7370c04eae4ec4a1696203b80206dc329bfc838" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.253204 4852 generic.go:334] "Generic (PLEG): container finished" podID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerID="c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996" exitCode=0 Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.253250 4852 generic.go:334] "Generic (PLEG): container finished" podID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerID="45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf" exitCode=0 Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.253258 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79","Type":"ContainerDied","Data":"c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996"} Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.253303 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.253332 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79","Type":"ContainerDied","Data":"45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf"} Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.253368 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0e6f204f-a06f-4c0e-aa8b-4fd57b568d79","Type":"ContainerDied","Data":"eb782267f91a5d2f6d7d7c8e8503fe81b152bba44f7cf038349acb228ea3b64c"} Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.253411 4852 scope.go:117] "RemoveContainer" containerID="90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.313625 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.314491 4852 scope.go:117] "RemoveContainer" containerID="dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.341594 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.356845 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.356929 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:30 crc kubenswrapper[4852]: E1201 20:24:30.357491 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="ceilometer-central-agent" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.357512 4852 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="ceilometer-central-agent" Dec 01 20:24:30 crc kubenswrapper[4852]: E1201 20:24:30.357534 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="049d7d92-2f9a-4d85-af6d-46a56c4f4072" containerName="nova-cell0-conductor-db-sync" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.357541 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="049d7d92-2f9a-4d85-af6d-46a56c4f4072" containerName="nova-cell0-conductor-db-sync" Dec 01 20:24:30 crc kubenswrapper[4852]: E1201 20:24:30.357567 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="sg-core" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.357574 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="sg-core" Dec 01 20:24:30 crc kubenswrapper[4852]: E1201 20:24:30.357602 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="proxy-httpd" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.357611 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="proxy-httpd" Dec 01 20:24:30 crc kubenswrapper[4852]: E1201 20:24:30.357622 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="ceilometer-notification-agent" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.357629 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="ceilometer-notification-agent" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.357874 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="proxy-httpd" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.357894 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="ceilometer-notification-agent" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.357911 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="049d7d92-2f9a-4d85-af6d-46a56c4f4072" containerName="nova-cell0-conductor-db-sync" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.357924 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="sg-core" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.357938 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" containerName="ceilometer-central-agent" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.360410 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.370269 4852 scope.go:117] "RemoveContainer" containerID="c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.370302 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.370777 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.371024 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.371195 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.418663 4852 scope.go:117] "RemoveContainer" containerID="45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.445507 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-log-httpd\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.445590 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-scripts\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.445641 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fvvr\" (UniqueName: \"kubernetes.io/projected/8c4d07b6-c67e-4233-a005-3c2a7c54805a-kube-api-access-4fvvr\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.445668 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.445706 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.445828 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-run-httpd\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.446116 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-config-data\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.446148 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.459922 4852 scope.go:117] "RemoveContainer" containerID="90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f" Dec 01 20:24:30 crc kubenswrapper[4852]: E1201 20:24:30.461669 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f\": container with ID starting with 90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f not found: ID does not exist" containerID="90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.461755 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f"} err="failed to get container status \"90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f\": rpc error: code = NotFound desc = could not find container \"90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f\": container with ID starting with 90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f not found: ID does not exist" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.461806 4852 scope.go:117] "RemoveContainer" containerID="dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481" Dec 01 20:24:30 crc kubenswrapper[4852]: E1201 20:24:30.462200 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481\": container with ID starting with dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481 not found: ID does not exist" containerID="dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.462241 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481"} err="failed to get container status \"dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481\": rpc error: code = NotFound desc = could not find container \"dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481\": container with ID starting with dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481 not found: ID does not exist" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.462270 4852 scope.go:117] "RemoveContainer" containerID="c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.462707 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 20:24:30 crc kubenswrapper[4852]: E1201 20:24:30.464846 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996\": container with ID starting with c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996 not found: ID does not exist" containerID="c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.464885 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996"} err="failed to get container status \"c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996\": rpc error: code = NotFound desc = could not find container \"c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996\": container with ID starting with c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996 not found: ID does not exist" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.464933 4852 scope.go:117] "RemoveContainer" containerID="45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf" Dec 01 20:24:30 crc kubenswrapper[4852]: E1201 20:24:30.465680 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf\": container with ID starting with 45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf not found: ID does not exist" containerID="45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.469021 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf"} err="failed to get container status \"45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf\": rpc error: code = NotFound desc = could not find container \"45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf\": container with ID starting with 45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf not found: ID does not exist" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.469097 4852 scope.go:117] "RemoveContainer" containerID="90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.469713 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f"} err="failed to get container status \"90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f\": rpc error: code = NotFound desc = could not find container \"90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f\": container with ID starting with 90be0024b950ef9fd4bd2a3ca90db632927c38e2be4dadfc78f6ccd30e3e549f not found: ID does not exist" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.469742 4852 scope.go:117] "RemoveContainer" containerID="dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.470017 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481"} err="failed to get container status \"dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481\": rpc error: code = NotFound desc = could not find container \"dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481\": container with ID 
starting with dba57aa8ff4f841e48a4685ed367a53ea0218eef6dc4b4968b1a6f6784e80481 not found: ID does not exist" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.470042 4852 scope.go:117] "RemoveContainer" containerID="c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.470362 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996"} err="failed to get container status \"c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996\": rpc error: code = NotFound desc = could not find container \"c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996\": container with ID starting with c15b9428b7c6d48a5b02620ca5d836d7404d6b43be756550b2df475f68c06996 not found: ID does not exist" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.470383 4852 scope.go:117] "RemoveContainer" containerID="45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.470671 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf"} err="failed to get container status \"45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf\": rpc error: code = NotFound desc = could not find container \"45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf\": container with ID starting with 45452a0184fa3820a8f75242b3e264ea035a5db336f17f82a040b153b0a9bbbf not found: ID does not exist" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.471305 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.488466 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.489001 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-c5w7b" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.495332 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548493 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-config-data\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548547 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548581 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f58a14-9ee3-44ea-9737-f14510a50b29-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"30f58a14-9ee3-44ea-9737-f14510a50b29\") " pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548639 4852 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f58a14-9ee3-44ea-9737-f14510a50b29-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"30f58a14-9ee3-44ea-9737-f14510a50b29\") " pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548680 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-log-httpd\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548702 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvnmt\" (UniqueName: \"kubernetes.io/projected/30f58a14-9ee3-44ea-9737-f14510a50b29-kube-api-access-tvnmt\") pod \"nova-cell0-conductor-0\" (UID: \"30f58a14-9ee3-44ea-9737-f14510a50b29\") " pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548736 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-scripts\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548765 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fvvr\" (UniqueName: \"kubernetes.io/projected/8c4d07b6-c67e-4233-a005-3c2a7c54805a-kube-api-access-4fvvr\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548787 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548812 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.548837 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-run-httpd\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.549392 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-run-httpd\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.552787 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-log-httpd\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc 
kubenswrapper[4852]: I1201 20:24:30.554913 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.555127 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-scripts\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.555661 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-config-data\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.555815 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.556008 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.565630 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fvvr\" (UniqueName: \"kubernetes.io/projected/8c4d07b6-c67e-4233-a005-3c2a7c54805a-kube-api-access-4fvvr\") pod \"ceilometer-0\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.651028 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f58a14-9ee3-44ea-9737-f14510a50b29-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"30f58a14-9ee3-44ea-9737-f14510a50b29\") " pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.651180 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f58a14-9ee3-44ea-9737-f14510a50b29-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"30f58a14-9ee3-44ea-9737-f14510a50b29\") " pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.651260 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvnmt\" (UniqueName: \"kubernetes.io/projected/30f58a14-9ee3-44ea-9737-f14510a50b29-kube-api-access-tvnmt\") pod \"nova-cell0-conductor-0\" (UID: \"30f58a14-9ee3-44ea-9737-f14510a50b29\") " pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.656688 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f58a14-9ee3-44ea-9737-f14510a50b29-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"30f58a14-9ee3-44ea-9737-f14510a50b29\") " 
pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.656895 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f58a14-9ee3-44ea-9737-f14510a50b29-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"30f58a14-9ee3-44ea-9737-f14510a50b29\") " pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.678190 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvnmt\" (UniqueName: \"kubernetes.io/projected/30f58a14-9ee3-44ea-9737-f14510a50b29-kube-api-access-tvnmt\") pod \"nova-cell0-conductor-0\" (UID: \"30f58a14-9ee3-44ea-9737-f14510a50b29\") " pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.695066 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:24:30 crc kubenswrapper[4852]: I1201 20:24:30.811331 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:31 crc kubenswrapper[4852]: I1201 20:24:31.196911 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:24:31 crc kubenswrapper[4852]: I1201 20:24:31.267870 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8c4d07b6-c67e-4233-a005-3c2a7c54805a","Type":"ContainerStarted","Data":"a2ed2adce1722ea89b65575982ea8bbb8bfb147c782df0a9484f02271868b0ea"} Dec 01 20:24:31 crc kubenswrapper[4852]: W1201 20:24:31.308776 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30f58a14_9ee3_44ea_9737_f14510a50b29.slice/crio-1adbb47dd3420fbf041f96a073b5536b3f279a256579d86b91819016334faded WatchSource:0}: Error finding container 1adbb47dd3420fbf041f96a073b5536b3f279a256579d86b91819016334faded: Status 404 returned error can't find the container with id 1adbb47dd3420fbf041f96a073b5536b3f279a256579d86b91819016334faded Dec 01 20:24:31 crc kubenswrapper[4852]: I1201 20:24:31.309889 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 20:24:32 crc kubenswrapper[4852]: I1201 20:24:32.279627 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"30f58a14-9ee3-44ea-9737-f14510a50b29","Type":"ContainerStarted","Data":"63fc759320cc27ba39d6de8235b66ed5329ad6f14d4925b4478a30519f71cac2"} Dec 01 20:24:32 crc kubenswrapper[4852]: I1201 20:24:32.280125 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:32 crc kubenswrapper[4852]: I1201 20:24:32.280146 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"30f58a14-9ee3-44ea-9737-f14510a50b29","Type":"ContainerStarted","Data":"1adbb47dd3420fbf041f96a073b5536b3f279a256579d86b91819016334faded"} Dec 01 20:24:32 crc kubenswrapper[4852]: I1201 20:24:32.281896 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8c4d07b6-c67e-4233-a005-3c2a7c54805a","Type":"ContainerStarted","Data":"fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18"} Dec 01 20:24:32 crc kubenswrapper[4852]: I1201 20:24:32.302274 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" 
podStartSLOduration=2.302250846 podStartE2EDuration="2.302250846s" podCreationTimestamp="2025-12-01 20:24:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:24:32.29855671 +0000 UTC m=+1192.225638127" watchObservedRunningTime="2025-12-01 20:24:32.302250846 +0000 UTC m=+1192.229332263" Dec 01 20:24:32 crc kubenswrapper[4852]: I1201 20:24:32.334841 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e6f204f-a06f-4c0e-aa8b-4fd57b568d79" path="/var/lib/kubelet/pods/0e6f204f-a06f-4c0e-aa8b-4fd57b568d79/volumes" Dec 01 20:24:33 crc kubenswrapper[4852]: I1201 20:24:33.296138 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8c4d07b6-c67e-4233-a005-3c2a7c54805a","Type":"ContainerStarted","Data":"37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873"} Dec 01 20:24:34 crc kubenswrapper[4852]: I1201 20:24:34.309023 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8c4d07b6-c67e-4233-a005-3c2a7c54805a","Type":"ContainerStarted","Data":"c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58"} Dec 01 20:24:36 crc kubenswrapper[4852]: I1201 20:24:36.346682 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 20:24:36 crc kubenswrapper[4852]: I1201 20:24:36.347260 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8c4d07b6-c67e-4233-a005-3c2a7c54805a","Type":"ContainerStarted","Data":"8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2"} Dec 01 20:24:36 crc kubenswrapper[4852]: I1201 20:24:36.389845 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.926683495 podStartE2EDuration="6.389813263s" podCreationTimestamp="2025-12-01 20:24:30 +0000 UTC" firstStartedPulling="2025-12-01 20:24:31.205898788 +0000 UTC m=+1191.132980205" lastFinishedPulling="2025-12-01 20:24:35.669028536 +0000 UTC m=+1195.596109973" observedRunningTime="2025-12-01 20:24:36.388316706 +0000 UTC m=+1196.315398153" watchObservedRunningTime="2025-12-01 20:24:36.389813263 +0000 UTC m=+1196.316894680" Dec 01 20:24:40 crc kubenswrapper[4852]: I1201 20:24:40.846747 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.362918 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-cgf5k"] Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.364755 4852 util.go:30] "No sandbox for pod can be found. 
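The pod_startup_latency_tracker entries above report both podStartSLOduration and podStartE2EDuration; for ceilometer-0 the SLO figure is the end-to-end time minus the image-pull window bounded by firstStartedPulling and lastFinishedPulling. A minimal Go sketch (the helper names are mine, not kubelet code) reproducing that arithmetic from the timestamps printed in the entry; it agrees with the logged SLO value to within tens of nanoseconds, since the kubelet samples its clocks separately:

    package main

    import (
        "fmt"
        "time"
    )

    // Layout matching the "2025-12-01 20:24:36.389813263 +0000 UTC" timestamps above.
    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

    func must(t time.Time, err error) time.Time {
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        // Values copied from the ceilometer-0 entry above.
        created := must(time.Parse(layout, "2025-12-01 20:24:30 +0000 UTC"))
        firstPull := must(time.Parse(layout, "2025-12-01 20:24:31.205898788 +0000 UTC"))
        lastPull := must(time.Parse(layout, "2025-12-01 20:24:35.669028536 +0000 UTC"))
        running := must(time.Parse(layout, "2025-12-01 20:24:36.389813263 +0000 UTC"))

        e2e := running.Sub(created)     // podStartE2EDuration: ~6.390s
        pull := lastPull.Sub(firstPull) // image pull window: ~4.463s
        slo := e2e - pull               // podStartSLOduration: ~1.927s
        fmt.Println(e2e, pull, slo)
    }

For nova-cell0-conductor-0 the pull timestamps are the zero time ("0001-01-01 ..."), i.e. no pull was needed, which is why its SLO and E2E durations coincide at ~2.302s.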
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.368927 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.369291 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.375970 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-cgf5k"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.420083 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-config-data\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.420198 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.420292 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dkrt\" (UniqueName: \"kubernetes.io/projected/d41256a2-ede4-4222-a4e5-1432e3e18e6f-kube-api-access-9dkrt\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.420318 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-scripts\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.523205 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-config-data\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.523274 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.523354 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dkrt\" (UniqueName: \"kubernetes.io/projected/d41256a2-ede4-4222-a4e5-1432e3e18e6f-kube-api-access-9dkrt\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.523373 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-scripts\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.531877 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-scripts\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.532556 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-config-data\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.550445 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.567372 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dkrt\" (UniqueName: \"kubernetes.io/projected/d41256a2-ede4-4222-a4e5-1432e3e18e6f-kube-api-access-9dkrt\") pod \"nova-cell0-cell-mapping-cgf5k\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.596517 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.598796 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.604213 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.628044 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.650745 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.653014 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.657609 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.672586 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.703336 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cgf5k"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.739685 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.739825 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bb04bd51-ad81-43a6-a502-d22c6df69f26-logs\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.739870 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.740008 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-454sm\" (UniqueName: \"kubernetes.io/projected/bb04bd51-ad81-43a6-a502-d22c6df69f26-kube-api-access-454sm\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.740104 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7da3536d-ea64-4b39-a315-586c31b8bfb3-logs\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.740129 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-config-data\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.740268 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-config-data\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.740357 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xt54l\" (UniqueName: \"kubernetes.io/projected/7da3536d-ea64-4b39-a315-586c31b8bfb3-kube-api-access-xt54l\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.763642 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.765497 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.774416 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.813489 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.830520 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75df6cf455-xldr7"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.832335 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845160 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-config-data\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845240 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl88v\" (UniqueName: \"kubernetes.io/projected/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-kube-api-access-fl88v\") pod \"nova-scheduler-0\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " pod="openstack/nova-scheduler-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845277 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xt54l\" (UniqueName: \"kubernetes.io/projected/7da3536d-ea64-4b39-a315-586c31b8bfb3-kube-api-access-xt54l\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845305 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " pod="openstack/nova-scheduler-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845342 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845379 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bb04bd51-ad81-43a6-a502-d22c6df69f26-logs\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845401 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845424 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-454sm\" (UniqueName: \"kubernetes.io/projected/bb04bd51-ad81-43a6-a502-d22c6df69f26-kube-api-access-454sm\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845474 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7da3536d-ea64-4b39-a315-586c31b8bfb3-logs\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845492 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-config-data\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.845527 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-config-data\") pod \"nova-scheduler-0\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " pod="openstack/nova-scheduler-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.847227 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bb04bd51-ad81-43a6-a502-d22c6df69f26-logs\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.848694 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7da3536d-ea64-4b39-a315-586c31b8bfb3-logs\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.850605 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.851187 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.853159 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.855616 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.867996 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-config-data\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.868054 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-config-data\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.868310 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.870109 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.871181 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xt54l\" (UniqueName: \"kubernetes.io/projected/7da3536d-ea64-4b39-a315-586c31b8bfb3-kube-api-access-xt54l\") pod \"nova-metadata-0\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " pod="openstack/nova-metadata-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.877508 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75df6cf455-xldr7"]
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.881310 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-454sm\" (UniqueName: \"kubernetes.io/projected/bb04bd51-ad81-43a6-a502-d22c6df69f26-kube-api-access-454sm\") pod \"nova-api-0\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " pod="openstack/nova-api-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947120 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4xnk\" (UniqueName: \"kubernetes.io/projected/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-kube-api-access-t4xnk\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947203 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947271 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl88v\" (UniqueName: \"kubernetes.io/projected/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-kube-api-access-fl88v\") pod \"nova-scheduler-0\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " pod="openstack/nova-scheduler-0"
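The block above is dominated by reconciler_common.go VerifyControllerAttachedVolume/MountVolume lines paired with their operation_generator.go SetUp results. When auditing a stretch of log like this it helps to pull out just the operation, volume, pod, and UID fields. A rough Go sketch, assuming the entries keep the exact quoting shown here (the inner quotes appear as \" in the raw text); mountRE and the output layout are my own choices, not kubelet structures:

    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    // Matches both "operationExecutor.MountVolume started" and
    // "MountVolume.SetUp succeeded" entries as they appear above.
    var mountRE = regexp.MustCompile(
        `(MountVolume started|MountVolume\.SetUp succeeded) for volume \\"([^"\\]+)\\" \(UniqueName: \\"([^"\\]+)\\"\) pod \\"([^"\\]+)\\" \(UID: \\"([^"\\]+)\\"\)`)

    func main() {
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // some entries are long
        for sc.Scan() {
            if m := mountRE.FindStringSubmatch(sc.Text()); m != nil {
                // m[1]=operation, m[2]=volume, m[4]=pod, m[5]=pod UID
                fmt.Printf("%-28s vol=%-22s pod=%s uid=%s\n", m[1], m[2], m[4], m[5])
            }
        }
    }

Fed this log on stdin, it would show each volume's "started" line followed shortly by its "SetUp succeeded" line, which is the reconciler's normal two-step cadence visible throughout this section.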
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947301 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-svc\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947333 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " pod="openstack/nova-scheduler-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947352 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-sb\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947380 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-swift-storage-0\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947401 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz89k\" (UniqueName: \"kubernetes.io/projected/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-kube-api-access-zz89k\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947467 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-config\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947505 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947545 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-nb\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.947572 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-config-data\") pod \"nova-scheduler-0\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " pod="openstack/nova-scheduler-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.952109 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-config-data\") pod \"nova-scheduler-0\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " pod="openstack/nova-scheduler-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.973161 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " pod="openstack/nova-scheduler-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.974382 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl88v\" (UniqueName: \"kubernetes.io/projected/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-kube-api-access-fl88v\") pod \"nova-scheduler-0\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " pod="openstack/nova-scheduler-0"
Dec 01 20:24:41 crc kubenswrapper[4852]: I1201 20:24:41.982260 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.023364 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.051232 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-nb\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.051336 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4xnk\" (UniqueName: \"kubernetes.io/projected/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-kube-api-access-t4xnk\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.051972 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.052058 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-svc\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.052372 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-nb\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.052854 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-sb\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.052922 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-swift-storage-0\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.053010 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz89k\" (UniqueName: \"kubernetes.io/projected/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-kube-api-access-zz89k\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.053041 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-svc\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.053130 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-config\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.053252 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.054005 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-config\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.054891 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-swift-storage-0\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.054983 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-sb\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.059035 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.059145 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.069558 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz89k\" (UniqueName: \"kubernetes.io/projected/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-kube-api-access-zz89k\") pod \"dnsmasq-dns-75df6cf455-xldr7\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.070060 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4xnk\" (UniqueName: \"kubernetes.io/projected/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-kube-api-access-t4xnk\") pod \"nova-cell1-novncproxy-0\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.106999 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.272693 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.283286 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.353426 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-cgf5k"]
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.358194 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9bmvk"]
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.361787 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.365320 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.365644 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.370204 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9bmvk"]
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.414694 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cgf5k" event={"ID":"d41256a2-ede4-4222-a4e5-1432e3e18e6f","Type":"ContainerStarted","Data":"a2e45a03752577577fe27263b7f896731e38ec51ccc8b47df201753973031b16"}
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.464006 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-scripts\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.464097 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4klrz\" (UniqueName: \"kubernetes.io/projected/8f952c0f-3899-4044-b025-a53f29bb3a59-kube-api-access-4klrz\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.464151 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.464228 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-config-data\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.501535 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.566796 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-scripts\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.567280 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4klrz\" (UniqueName: \"kubernetes.io/projected/8f952c0f-3899-4044-b025-a53f29bb3a59-kube-api-access-4klrz\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.567329 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.567378 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-config-data\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.593582 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-scripts\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.595605 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.598062 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-config-data\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.625710 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.637113 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4klrz\" (UniqueName: \"kubernetes.io/projected/8f952c0f-3899-4044-b025-a53f29bb3a59-kube-api-access-4klrz\") pod \"nova-cell1-conductor-db-sync-9bmvk\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.689923 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.700936 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9bmvk"
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.882436 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75df6cf455-xldr7"]
Dec 01 20:24:42 crc kubenswrapper[4852]: I1201 20:24:42.974328 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 01 20:24:43 crc kubenswrapper[4852]: W1201 20:24:43.181116 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f952c0f_3899_4044_b025_a53f29bb3a59.slice/crio-712954158f4159a0dca16246c9714bcc65f0fea221ffdc336a3c97d1ac91dff1 WatchSource:0}: Error finding container 712954158f4159a0dca16246c9714bcc65f0fea221ffdc336a3c97d1ac91dff1: Status 404 returned error can't find the container with id 712954158f4159a0dca16246c9714bcc65f0fea221ffdc336a3c97d1ac91dff1
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.182174 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9bmvk"]
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.430127 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d","Type":"ContainerStarted","Data":"c7aa9dee549969ef0a45c5b1a417287c8a4fb8a1dc7d59e901a4421252c86a1a"}
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.434649 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7da3536d-ea64-4b39-a315-586c31b8bfb3","Type":"ContainerStarted","Data":"ed41723220de4e250ba28b1115bfcded0e57e6849c074e46311d7a19acc4039f"}
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.436914 4852 generic.go:334] "Generic (PLEG): container finished" podID="f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" containerID="fbe3bf4c975f4aa68644891a094272236e06b166ea34de3fae2005726bd64096" exitCode=0
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.436998 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" event={"ID":"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36","Type":"ContainerDied","Data":"fbe3bf4c975f4aa68644891a094272236e06b166ea34de3fae2005726bd64096"}
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.437021 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" event={"ID":"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36","Type":"ContainerStarted","Data":"ecee5814455bab16670e77fba436a20049604887c4a853f6ba4761e2b42a3d81"}
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.443934 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb04bd51-ad81-43a6-a502-d22c6df69f26","Type":"ContainerStarted","Data":"66b8ddc58028e689c240e2bf549589fbeb1dbcef1b4c84b8165f5d298bdedcc7"}
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.446970 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9bmvk" event={"ID":"8f952c0f-3899-4044-b025-a53f29bb3a59","Type":"ContainerStarted","Data":"566b6bea73bc747292f38185facbf6f05f9445e524a47398ee0825ea80995676"}
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.447098 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9bmvk" event={"ID":"8f952c0f-3899-4044-b025-a53f29bb3a59","Type":"ContainerStarted","Data":"712954158f4159a0dca16246c9714bcc65f0fea221ffdc336a3c97d1ac91dff1"}
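The SyncLoop (PLEG): event entries above pair each pod with a ContainerStarted or ContainerDied notification and a container ID. A small Go sketch that tallies those events per pod when fed lines like these; plegRE and the counting scheme are assumptions for illustration, not kubelet internals:

    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    // Matches: "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0"
    //          event={"ID":"...","Type":"ContainerStarted","Data":"..."}
    var plegRE = regexp.MustCompile(
        `SyncLoop \(PLEG\): event for pod" pod="([^"]+)" event=\{"ID":"([^"]+)","Type":"([^"]+)","Data":"([^"]+)"\}`)

    func main() {
        counts := map[string]int{}
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
        for sc.Scan() {
            if m := plegRE.FindStringSubmatch(sc.Text()); m != nil {
                counts[m[1]+" "+m[3]]++ // e.g. "openstack/ceilometer-0 ContainerStarted"
            }
        }
        for k, n := range counts {
            fmt.Println(n, k)
        }
    }

On this section it would show, for example, several ContainerStarted events for openstack/ceilometer-0 (one per container in the pod plus its sandbox) and the ContainerDied/ContainerStarted pair for the dnsmasq init sequence above.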
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.448839 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cgf5k" event={"ID":"d41256a2-ede4-4222-a4e5-1432e3e18e6f","Type":"ContainerStarted","Data":"199787d5304b740bfaae368ca3715767d6a3122959c93412ab50419ed2f53548"}
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.450983 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86","Type":"ContainerStarted","Data":"dd0bc2009f20ee7f96f84fb6585a8c911da3f35728c7b0bc79ab3bd836ee55c9"}
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.494629 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-cgf5k" podStartSLOduration=2.494610067 podStartE2EDuration="2.494610067s" podCreationTimestamp="2025-12-01 20:24:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:24:43.491272802 +0000 UTC m=+1203.418354219" watchObservedRunningTime="2025-12-01 20:24:43.494610067 +0000 UTC m=+1203.421691484"
Dec 01 20:24:43 crc kubenswrapper[4852]: I1201 20:24:43.516669 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-9bmvk" podStartSLOduration=1.516647949 podStartE2EDuration="1.516647949s" podCreationTimestamp="2025-12-01 20:24:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:24:43.51063804 +0000 UTC m=+1203.437719817" watchObservedRunningTime="2025-12-01 20:24:43.516647949 +0000 UTC m=+1203.443729366"
Dec 01 20:24:44 crc kubenswrapper[4852]: I1201 20:24:44.473529 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" event={"ID":"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36","Type":"ContainerStarted","Data":"a53ec01fb098442e31e8010061eea15998b5773c119c97453efb5da80fe5e8b1"}
Dec 01 20:24:44 crc kubenswrapper[4852]: I1201 20:24:44.474726 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75df6cf455-xldr7"
Dec 01 20:24:44 crc kubenswrapper[4852]: I1201 20:24:44.505180 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" podStartSLOduration=3.50514779 podStartE2EDuration="3.50514779s" podCreationTimestamp="2025-12-01 20:24:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:24:44.496912432 +0000 UTC m=+1204.423993849" watchObservedRunningTime="2025-12-01 20:24:44.50514779 +0000 UTC m=+1204.432229207"
Dec 01 20:24:45 crc kubenswrapper[4852]: I1201 20:24:45.475743 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 01 20:24:45 crc kubenswrapper[4852]: I1201 20:24:45.488184 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.517036 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86","Type":"ContainerStarted","Data":"c8ab28336e385c83c8d7dc8ed2c9266e35ce4e62a86c8e56a04db2f67bf52ec0"}
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.521881 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d","Type":"ContainerStarted","Data":"6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec"}
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.522059 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec" gracePeriod=30
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.527895 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7da3536d-ea64-4b39-a315-586c31b8bfb3","Type":"ContainerStarted","Data":"78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a"}
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.528157 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7da3536d-ea64-4b39-a315-586c31b8bfb3" containerName="nova-metadata-log" containerID="cri-o://78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a" gracePeriod=30
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.528169 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7da3536d-ea64-4b39-a315-586c31b8bfb3" containerName="nova-metadata-metadata" containerID="cri-o://bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5" gracePeriod=30
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.543125 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb04bd51-ad81-43a6-a502-d22c6df69f26","Type":"ContainerStarted","Data":"6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951"}
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.552913 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.351719522 podStartE2EDuration="5.552879574s" podCreationTimestamp="2025-12-01 20:24:41 +0000 UTC" firstStartedPulling="2025-12-01 20:24:42.695522952 +0000 UTC m=+1202.622604359" lastFinishedPulling="2025-12-01 20:24:45.896682994 +0000 UTC m=+1205.823764411" observedRunningTime="2025-12-01 20:24:46.537055867 +0000 UTC m=+1206.464137294" watchObservedRunningTime="2025-12-01 20:24:46.552879574 +0000 UTC m=+1206.479961021"
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.560561 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.605295102 podStartE2EDuration="5.560534834s" podCreationTimestamp="2025-12-01 20:24:41 +0000 UTC" firstStartedPulling="2025-12-01 20:24:42.981359175 +0000 UTC m=+1202.908440592" lastFinishedPulling="2025-12-01 20:24:45.936598907 +0000 UTC m=+1205.863680324" observedRunningTime="2025-12-01 20:24:46.554326049 +0000 UTC m=+1206.481407466" watchObservedRunningTime="2025-12-01 20:24:46.560534834 +0000 UTC m=+1206.487616251"
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.592581 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.302737504 podStartE2EDuration="5.592558449s" podCreationTimestamp="2025-12-01 20:24:41 +0000 UTC" firstStartedPulling="2025-12-01 20:24:42.606803877 +0000 UTC m=+1202.533885294" lastFinishedPulling="2025-12-01 20:24:45.896624822 +0000 UTC m=+1205.823706239" observedRunningTime="2025-12-01 20:24:46.585528089 +0000 UTC m=+1206.512609516" watchObservedRunningTime="2025-12-01 20:24:46.592558449 +0000 UTC m=+1206.519639866"
Dec 01 20:24:46 crc kubenswrapper[4852]: I1201 20:24:46.617199 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.244503438 podStartE2EDuration="5.617174343s" podCreationTimestamp="2025-12-01 20:24:41 +0000 UTC" firstStartedPulling="2025-12-01 20:24:42.508918124 +0000 UTC m=+1202.435999531" lastFinishedPulling="2025-12-01 20:24:45.881589019 +0000 UTC m=+1205.808670436" observedRunningTime="2025-12-01 20:24:46.608512141 +0000 UTC m=+1206.535593568" watchObservedRunningTime="2025-12-01 20:24:46.617174343 +0000 UTC m=+1206.544255760"
Dec 01 20:24:47 crc kubenswrapper[4852]: I1201 20:24:47.023833 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 01 20:24:47 crc kubenswrapper[4852]: I1201 20:24:47.024355 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 01 20:24:47 crc kubenswrapper[4852]: I1201 20:24:47.108972 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 01 20:24:47 crc kubenswrapper[4852]: I1201 20:24:47.284029 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 01 20:24:47 crc kubenswrapper[4852]: I1201 20:24:47.556157 4852 generic.go:334] "Generic (PLEG): container finished" podID="7da3536d-ea64-4b39-a315-586c31b8bfb3" containerID="78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a" exitCode=143
Dec 01 20:24:47 crc kubenswrapper[4852]: I1201 20:24:47.556273 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7da3536d-ea64-4b39-a315-586c31b8bfb3","Type":"ContainerDied","Data":"78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a"}
Dec 01 20:24:47 crc kubenswrapper[4852]: I1201 20:24:47.556888 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7da3536d-ea64-4b39-a315-586c31b8bfb3","Type":"ContainerStarted","Data":"bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5"}
Dec 01 20:24:47 crc kubenswrapper[4852]: I1201 20:24:47.558936 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb04bd51-ad81-43a6-a502-d22c6df69f26","Type":"ContainerStarted","Data":"1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9"}
Dec 01 20:24:51 crc kubenswrapper[4852]: I1201 20:24:51.603872 4852 generic.go:334] "Generic (PLEG): container finished" podID="8f952c0f-3899-4044-b025-a53f29bb3a59" containerID="566b6bea73bc747292f38185facbf6f05f9445e524a47398ee0825ea80995676" exitCode=0
Dec 01 20:24:51 crc kubenswrapper[4852]: I1201 20:24:51.603926 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9bmvk" event={"ID":"8f952c0f-3899-4044-b025-a53f29bb3a59","Type":"ContainerDied","Data":"566b6bea73bc747292f38185facbf6f05f9445e524a47398ee0825ea80995676"}
Dec 01 20:24:51 crc kubenswrapper[4852]: I1201 20:24:51.607799 4852 generic.go:334] "Generic (PLEG): container finished" podID="d41256a2-ede4-4222-a4e5-1432e3e18e6f" containerID="199787d5304b740bfaae368ca3715767d6a3122959c93412ab50419ed2f53548" exitCode=0
Dec 01 20:24:51 crc kubenswrapper[4852]: I1201 20:24:51.607883 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cgf5k" event={"ID":"d41256a2-ede4-4222-a4e5-1432e3e18e6f","Type":"ContainerDied","Data":"199787d5304b740bfaae368ca3715767d6a3122959c93412ab50419ed2f53548"}
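The "Killing container with a grace period" entries above (gracePeriod=30) are later matched by a container finished record with exitCode=143, i.e. 128+15: the process exited on the SIGTERM sent at the start of the grace window rather than being SIGKILLed when it expired. A toy Go sketch of that TERM-then-KILL pattern; it drives a local process for illustration, not a CRI-O container:

    package main

    import (
        "fmt"
        "os/exec"
        "syscall"
        "time"
    )

    func main() {
        cmd := exec.Command("sleep", "300")
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()

        cmd.Process.Signal(syscall.SIGTERM) // start of the grace window
        select {
        case err := <-done:
            // A process that dies on SIGTERM reports a 143-style status (128+15),
            // matching the exitCode=143 logged above.
            fmt.Println("exited within grace period:", err)
        case <-time.After(30 * time.Second): // gracePeriod=30 from the log
            cmd.Process.Kill() // SIGKILL once the grace period lapses
            fmt.Println("grace period expired, killed:", <-done)
        }
    }

The exitCode=0 records nearby are the other normal case: the db-sync and cell-mapping job containers simply ran to completion.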
for pod" pod="openstack/nova-cell0-cell-mapping-cgf5k" event={"ID":"d41256a2-ede4-4222-a4e5-1432e3e18e6f","Type":"ContainerDied","Data":"199787d5304b740bfaae368ca3715767d6a3122959c93412ab50419ed2f53548"} Dec 01 20:24:51 crc kubenswrapper[4852]: I1201 20:24:51.983046 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 20:24:51 crc kubenswrapper[4852]: I1201 20:24:51.983116 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 20:24:52 crc kubenswrapper[4852]: I1201 20:24:52.108757 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 01 20:24:52 crc kubenswrapper[4852]: I1201 20:24:52.144081 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 01 20:24:52 crc kubenswrapper[4852]: I1201 20:24:52.275864 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:52.390085 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c55f6679-z26fd"] Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:52.390339 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-c55f6679-z26fd" podUID="119db55f-ccc9-47b3-b81c-c7907841b276" containerName="dnsmasq-dns" containerID="cri-o://08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff" gracePeriod=10 Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.119810 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.135869 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.160751 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.594723 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cgf5k" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.621159 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-config-data\") pod \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.621237 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dkrt\" (UniqueName: \"kubernetes.io/projected/d41256a2-ede4-4222-a4e5-1432e3e18e6f-kube-api-access-9dkrt\") pod \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.621278 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-combined-ca-bundle\") pod \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.621298 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-scripts\") pod \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\" (UID: \"d41256a2-ede4-4222-a4e5-1432e3e18e6f\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.632750 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d41256a2-ede4-4222-a4e5-1432e3e18e6f-kube-api-access-9dkrt" (OuterVolumeSpecName: "kube-api-access-9dkrt") pod "d41256a2-ede4-4222-a4e5-1432e3e18e6f" (UID: "d41256a2-ede4-4222-a4e5-1432e3e18e6f"). InnerVolumeSpecName "kube-api-access-9dkrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.652353 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-scripts" (OuterVolumeSpecName: "scripts") pod "d41256a2-ede4-4222-a4e5-1432e3e18e6f" (UID: "d41256a2-ede4-4222-a4e5-1432e3e18e6f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.698816 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d41256a2-ede4-4222-a4e5-1432e3e18e6f" (UID: "d41256a2-ede4-4222-a4e5-1432e3e18e6f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.699990 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-config-data" (OuterVolumeSpecName: "config-data") pod "d41256a2-ede4-4222-a4e5-1432e3e18e6f" (UID: "d41256a2-ede4-4222-a4e5-1432e3e18e6f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.723741 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.723789 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dkrt\" (UniqueName: \"kubernetes.io/projected/d41256a2-ede4-4222-a4e5-1432e3e18e6f-kube-api-access-9dkrt\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.723802 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.723812 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41256a2-ede4-4222-a4e5-1432e3e18e6f-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.756313 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.761998 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9bmvk" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.822544 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.822897 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerName="nova-api-log" containerID="cri-o://6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951" gracePeriod=30 Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.823440 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerName="nova-api-api" containerID="cri-o://1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9" gracePeriod=30 Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.825754 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-config\") pod \"119db55f-ccc9-47b3-b81c-c7907841b276\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.825827 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-swift-storage-0\") pod \"119db55f-ccc9-47b3-b81c-c7907841b276\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.825854 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-config-data\") pod \"8f952c0f-3899-4044-b025-a53f29bb3a59\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.825889 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-scripts\") pod \"8f952c0f-3899-4044-b025-a53f29bb3a59\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.825943 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-svc\") pod \"119db55f-ccc9-47b3-b81c-c7907841b276\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.825981 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-combined-ca-bundle\") pod \"8f952c0f-3899-4044-b025-a53f29bb3a59\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.826006 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-nb\") pod \"119db55f-ccc9-47b3-b81c-c7907841b276\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.826046 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nxwm\" (UniqueName: \"kubernetes.io/projected/119db55f-ccc9-47b3-b81c-c7907841b276-kube-api-access-4nxwm\") pod \"119db55f-ccc9-47b3-b81c-c7907841b276\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.826097 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4klrz\" (UniqueName: \"kubernetes.io/projected/8f952c0f-3899-4044-b025-a53f29bb3a59-kube-api-access-4klrz\") pod \"8f952c0f-3899-4044-b025-a53f29bb3a59\" (UID: \"8f952c0f-3899-4044-b025-a53f29bb3a59\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.826120 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-sb\") pod \"119db55f-ccc9-47b3-b81c-c7907841b276\" (UID: \"119db55f-ccc9-47b3-b81c-c7907841b276\") " Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.841160 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/119db55f-ccc9-47b3-b81c-c7907841b276-kube-api-access-4nxwm" (OuterVolumeSpecName: "kube-api-access-4nxwm") pod "119db55f-ccc9-47b3-b81c-c7907841b276" (UID: "119db55f-ccc9-47b3-b81c-c7907841b276"). InnerVolumeSpecName "kube-api-access-4nxwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.860103 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-scripts" (OuterVolumeSpecName: "scripts") pod "8f952c0f-3899-4044-b025-a53f29bb3a59" (UID: "8f952c0f-3899-4044-b025-a53f29bb3a59"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.866622 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f952c0f-3899-4044-b025-a53f29bb3a59-kube-api-access-4klrz" (OuterVolumeSpecName: "kube-api-access-4klrz") pod "8f952c0f-3899-4044-b025-a53f29bb3a59" (UID: "8f952c0f-3899-4044-b025-a53f29bb3a59"). 
InnerVolumeSpecName "kube-api-access-4klrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.888137 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f952c0f-3899-4044-b025-a53f29bb3a59" (UID: "8f952c0f-3899-4044-b025-a53f29bb3a59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.903253 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-config-data" (OuterVolumeSpecName: "config-data") pod "8f952c0f-3899-4044-b025-a53f29bb3a59" (UID: "8f952c0f-3899-4044-b025-a53f29bb3a59"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.908269 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "119db55f-ccc9-47b3-b81c-c7907841b276" (UID: "119db55f-ccc9-47b3-b81c-c7907841b276"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.911479 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "119db55f-ccc9-47b3-b81c-c7907841b276" (UID: "119db55f-ccc9-47b3-b81c-c7907841b276"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.914261 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "119db55f-ccc9-47b3-b81c-c7907841b276" (UID: "119db55f-ccc9-47b3-b81c-c7907841b276"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.918030 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-config" (OuterVolumeSpecName: "config") pod "119db55f-ccc9-47b3-b81c-c7907841b276" (UID: "119db55f-ccc9-47b3-b81c-c7907841b276"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.919187 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "119db55f-ccc9-47b3-b81c-c7907841b276" (UID: "119db55f-ccc9-47b3-b81c-c7907841b276"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.930328 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nxwm\" (UniqueName: \"kubernetes.io/projected/119db55f-ccc9-47b3-b81c-c7907841b276-kube-api-access-4nxwm\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.930550 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4klrz\" (UniqueName: \"kubernetes.io/projected/8f952c0f-3899-4044-b025-a53f29bb3a59-kube-api-access-4klrz\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.930650 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.930711 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.930776 4852 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.930839 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.930937 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.931002 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.931067 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f952c0f-3899-4044-b025-a53f29bb3a59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:53 crc kubenswrapper[4852]: I1201 20:24:53.931125 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/119db55f-ccc9-47b3-b81c-c7907841b276-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.105413 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cgf5k" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.105443 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cgf5k" event={"ID":"d41256a2-ede4-4222-a4e5-1432e3e18e6f","Type":"ContainerDied","Data":"a2e45a03752577577fe27263b7f896731e38ec51ccc8b47df201753973031b16"} Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.105522 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2e45a03752577577fe27263b7f896731e38ec51ccc8b47df201753973031b16" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.118377 4852 generic.go:334] "Generic (PLEG): container finished" podID="119db55f-ccc9-47b3-b81c-c7907841b276" containerID="08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff" exitCode=0 Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.118694 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c55f6679-z26fd" event={"ID":"119db55f-ccc9-47b3-b81c-c7907841b276","Type":"ContainerDied","Data":"08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff"} Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.119433 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c55f6679-z26fd" event={"ID":"119db55f-ccc9-47b3-b81c-c7907841b276","Type":"ContainerDied","Data":"46ff4c3617bf8798d6a0822398d5310165f885ce1591490b819fade80a8e6ebf"} Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.119509 4852 scope.go:117] "RemoveContainer" containerID="08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.118860 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c55f6679-z26fd" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.126560 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9bmvk" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.126567 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9bmvk" event={"ID":"8f952c0f-3899-4044-b025-a53f29bb3a59","Type":"ContainerDied","Data":"712954158f4159a0dca16246c9714bcc65f0fea221ffdc336a3c97d1ac91dff1"} Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.127185 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="712954158f4159a0dca16246c9714bcc65f0fea221ffdc336a3c97d1ac91dff1" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.128485 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.224796 4852 scope.go:117] "RemoveContainer" containerID="cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.241276 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c55f6679-z26fd"] Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.253628 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c55f6679-z26fd"] Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.254645 4852 scope.go:117] "RemoveContainer" containerID="08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff" Dec 01 20:24:54 crc kubenswrapper[4852]: E1201 20:24:54.256055 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff\": container with ID starting with 08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff not found: ID does not exist" containerID="08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.256136 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff"} err="failed to get container status \"08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff\": rpc error: code = NotFound desc = could not find container \"08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff\": container with ID starting with 08f54377bae12457121cce2439b1efd92b95d5accf52457369c896256baab2ff not found: ID does not exist" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.256186 4852 scope.go:117] "RemoveContainer" containerID="cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b" Dec 01 20:24:54 crc kubenswrapper[4852]: E1201 20:24:54.256751 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b\": container with ID starting with cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b not found: ID does not exist" containerID="cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.256784 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b"} err="failed to get container status \"cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b\": rpc error: code = NotFound desc = could not find container 
\"cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b\": container with ID starting with cec8e133da954e36906c54d90bf5541a1e67a6916215e0d5c4b128a3c7425e2b not found: ID does not exist" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.335382 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="119db55f-ccc9-47b3-b81c-c7907841b276" path="/var/lib/kubelet/pods/119db55f-ccc9-47b3-b81c-c7907841b276/volumes" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.891386 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 20:24:54 crc kubenswrapper[4852]: E1201 20:24:54.892160 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d41256a2-ede4-4222-a4e5-1432e3e18e6f" containerName="nova-manage" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.892188 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="d41256a2-ede4-4222-a4e5-1432e3e18e6f" containerName="nova-manage" Dec 01 20:24:54 crc kubenswrapper[4852]: E1201 20:24:54.892207 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="119db55f-ccc9-47b3-b81c-c7907841b276" containerName="dnsmasq-dns" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.892216 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="119db55f-ccc9-47b3-b81c-c7907841b276" containerName="dnsmasq-dns" Dec 01 20:24:54 crc kubenswrapper[4852]: E1201 20:24:54.892226 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="119db55f-ccc9-47b3-b81c-c7907841b276" containerName="init" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.892234 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="119db55f-ccc9-47b3-b81c-c7907841b276" containerName="init" Dec 01 20:24:54 crc kubenswrapper[4852]: E1201 20:24:54.892249 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f952c0f-3899-4044-b025-a53f29bb3a59" containerName="nova-cell1-conductor-db-sync" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.892256 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f952c0f-3899-4044-b025-a53f29bb3a59" containerName="nova-cell1-conductor-db-sync" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.892441 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f952c0f-3899-4044-b025-a53f29bb3a59" containerName="nova-cell1-conductor-db-sync" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.892482 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="119db55f-ccc9-47b3-b81c-c7907841b276" containerName="dnsmasq-dns" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.892491 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="d41256a2-ede4-4222-a4e5-1432e3e18e6f" containerName="nova-manage" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.893219 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.895981 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.909994 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.959279 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04\") " pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.959926 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4vhs\" (UniqueName: \"kubernetes.io/projected/9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04-kube-api-access-b4vhs\") pod \"nova-cell1-conductor-0\" (UID: \"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04\") " pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:54 crc kubenswrapper[4852]: I1201 20:24:54.959957 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04\") " pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.063038 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04\") " pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.063201 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4vhs\" (UniqueName: \"kubernetes.io/projected/9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04-kube-api-access-b4vhs\") pod \"nova-cell1-conductor-0\" (UID: \"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04\") " pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.063243 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04\") " pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.078606 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04\") " pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.082382 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04\") " pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.099253 4852 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4vhs\" (UniqueName: \"kubernetes.io/projected/9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04-kube-api-access-b4vhs\") pod \"nova-cell1-conductor-0\" (UID: \"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04\") " pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.139649 4852 generic.go:334] "Generic (PLEG): container finished" podID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerID="6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951" exitCode=143 Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.140202 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb04bd51-ad81-43a6-a502-d22c6df69f26","Type":"ContainerDied","Data":"6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951"} Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.142142 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86" containerName="nova-scheduler-scheduler" containerID="cri-o://c8ab28336e385c83c8d7dc8ed2c9266e35ce4e62a86c8e56a04db2f67bf52ec0" gracePeriod=30 Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.213397 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:55 crc kubenswrapper[4852]: I1201 20:24:55.698665 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 20:24:56 crc kubenswrapper[4852]: I1201 20:24:56.153915 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04","Type":"ContainerStarted","Data":"2adc2a2bcc076bea2d2987cc7441b048d01517683f2132a606018bdce075a702"} Dec 01 20:24:56 crc kubenswrapper[4852]: I1201 20:24:56.154390 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 01 20:24:56 crc kubenswrapper[4852]: I1201 20:24:56.154409 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04","Type":"ContainerStarted","Data":"b041f8a288e86796620ad38a908ed8c8f7b49654c66f7023e28650d5282ca8c5"} Dec 01 20:24:56 crc kubenswrapper[4852]: I1201 20:24:56.183133 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.183109728 podStartE2EDuration="2.183109728s" podCreationTimestamp="2025-12-01 20:24:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:24:56.174073455 +0000 UTC m=+1216.101154872" watchObservedRunningTime="2025-12-01 20:24:56.183109728 +0000 UTC m=+1216.110191145" Dec 01 20:24:57 crc kubenswrapper[4852]: E1201 20:24:57.112846 4852 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c8ab28336e385c83c8d7dc8ed2c9266e35ce4e62a86c8e56a04db2f67bf52ec0" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 20:24:57 crc kubenswrapper[4852]: E1201 20:24:57.116303 4852 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , 
exit code -1" containerID="c8ab28336e385c83c8d7dc8ed2c9266e35ce4e62a86c8e56a04db2f67bf52ec0" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 20:24:57 crc kubenswrapper[4852]: E1201 20:24:57.121300 4852 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c8ab28336e385c83c8d7dc8ed2c9266e35ce4e62a86c8e56a04db2f67bf52ec0" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 20:24:57 crc kubenswrapper[4852]: E1201 20:24:57.123004 4852 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86" containerName="nova-scheduler-scheduler" Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.189293 4852 generic.go:334] "Generic (PLEG): container finished" podID="f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86" containerID="c8ab28336e385c83c8d7dc8ed2c9266e35ce4e62a86c8e56a04db2f67bf52ec0" exitCode=0 Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.189387 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86","Type":"ContainerDied","Data":"c8ab28336e385c83c8d7dc8ed2c9266e35ce4e62a86c8e56a04db2f67bf52ec0"} Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.484503 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.642177 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-config-data\") pod \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.642643 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-combined-ca-bundle\") pod \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.642840 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fl88v\" (UniqueName: \"kubernetes.io/projected/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-kube-api-access-fl88v\") pod \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\" (UID: \"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86\") " Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.652436 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-kube-api-access-fl88v" (OuterVolumeSpecName: "kube-api-access-fl88v") pod "f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86" (UID: "f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86"). InnerVolumeSpecName "kube-api-access-fl88v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.676151 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-config-data" (OuterVolumeSpecName: "config-data") pod "f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86" (UID: "f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.682127 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86" (UID: "f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.746098 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.746152 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fl88v\" (UniqueName: \"kubernetes.io/projected/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-kube-api-access-fl88v\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:58 crc kubenswrapper[4852]: I1201 20:24:58.746165 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.202937 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86","Type":"ContainerDied","Data":"dd0bc2009f20ee7f96f84fb6585a8c911da3f35728c7b0bc79ab3bd836ee55c9"} Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.202984 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.203005 4852 scope.go:117] "RemoveContainer" containerID="c8ab28336e385c83c8d7dc8ed2c9266e35ce4e62a86c8e56a04db2f67bf52ec0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.244754 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.256742 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.267481 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 20:24:59 crc kubenswrapper[4852]: E1201 20:24:59.267980 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86" containerName="nova-scheduler-scheduler" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.268002 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86" containerName="nova-scheduler-scheduler" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.268231 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86" containerName="nova-scheduler-scheduler" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.272618 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.281335 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.284710 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.462590 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-config-data\") pod \"nova-scheduler-0\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.462729 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrbjx\" (UniqueName: \"kubernetes.io/projected/c07a26c9-d5fd-47c7-a98b-c2753e892efc-kube-api-access-lrbjx\") pod \"nova-scheduler-0\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.462887 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.566035 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.566125 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-config-data\") pod \"nova-scheduler-0\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.566208 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrbjx\" (UniqueName: \"kubernetes.io/projected/c07a26c9-d5fd-47c7-a98b-c2753e892efc-kube-api-access-lrbjx\") pod \"nova-scheduler-0\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.573004 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-config-data\") pod \"nova-scheduler-0\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.573468 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.590778 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrbjx\" (UniqueName: 
\"kubernetes.io/projected/c07a26c9-d5fd-47c7-a98b-c2753e892efc-kube-api-access-lrbjx\") pod \"nova-scheduler-0\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.696226 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.776437 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.875257 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bb04bd51-ad81-43a6-a502-d22c6df69f26-logs\") pod \"bb04bd51-ad81-43a6-a502-d22c6df69f26\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.875345 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-config-data\") pod \"bb04bd51-ad81-43a6-a502-d22c6df69f26\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.875399 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-454sm\" (UniqueName: \"kubernetes.io/projected/bb04bd51-ad81-43a6-a502-d22c6df69f26-kube-api-access-454sm\") pod \"bb04bd51-ad81-43a6-a502-d22c6df69f26\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.875523 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-combined-ca-bundle\") pod \"bb04bd51-ad81-43a6-a502-d22c6df69f26\" (UID: \"bb04bd51-ad81-43a6-a502-d22c6df69f26\") " Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.879006 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb04bd51-ad81-43a6-a502-d22c6df69f26-logs" (OuterVolumeSpecName: "logs") pod "bb04bd51-ad81-43a6-a502-d22c6df69f26" (UID: "bb04bd51-ad81-43a6-a502-d22c6df69f26"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.918737 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-config-data" (OuterVolumeSpecName: "config-data") pod "bb04bd51-ad81-43a6-a502-d22c6df69f26" (UID: "bb04bd51-ad81-43a6-a502-d22c6df69f26"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.919923 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb04bd51-ad81-43a6-a502-d22c6df69f26-kube-api-access-454sm" (OuterVolumeSpecName: "kube-api-access-454sm") pod "bb04bd51-ad81-43a6-a502-d22c6df69f26" (UID: "bb04bd51-ad81-43a6-a502-d22c6df69f26"). InnerVolumeSpecName "kube-api-access-454sm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.940025 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bb04bd51-ad81-43a6-a502-d22c6df69f26" (UID: "bb04bd51-ad81-43a6-a502-d22c6df69f26"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.978572 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-454sm\" (UniqueName: \"kubernetes.io/projected/bb04bd51-ad81-43a6-a502-d22c6df69f26-kube-api-access-454sm\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.978607 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.978619 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bb04bd51-ad81-43a6-a502-d22c6df69f26-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:24:59 crc kubenswrapper[4852]: I1201 20:24:59.978628 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb04bd51-ad81-43a6-a502-d22c6df69f26-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.214816 4852 generic.go:334] "Generic (PLEG): container finished" podID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerID="1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9" exitCode=0 Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.214894 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.214905 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb04bd51-ad81-43a6-a502-d22c6df69f26","Type":"ContainerDied","Data":"1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9"} Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.215310 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb04bd51-ad81-43a6-a502-d22c6df69f26","Type":"ContainerDied","Data":"66b8ddc58028e689c240e2bf549589fbeb1dbcef1b4c84b8165f5d298bdedcc7"} Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.215332 4852 scope.go:117] "RemoveContainer" containerID="1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.254447 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.255723 4852 scope.go:117] "RemoveContainer" containerID="6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.264432 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.267439 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.290517 4852 scope.go:117] "RemoveContainer" containerID="1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.291658 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:00 crc kubenswrapper[4852]: E1201 20:25:00.292252 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerName="nova-api-log" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.292276 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerName="nova-api-log" Dec 01 20:25:00 crc kubenswrapper[4852]: E1201 20:25:00.292296 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerName="nova-api-api" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.292304 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerName="nova-api-api" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.292563 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerName="nova-api-log" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.292600 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" containerName="nova-api-api" Dec 01 20:25:00 crc kubenswrapper[4852]: E1201 20:25:00.293734 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9\": container with ID starting with 1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9 not found: ID does not exist" containerID="1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.293852 4852 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9"} err="failed to get container status \"1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9\": rpc error: code = NotFound desc = could not find container \"1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9\": container with ID starting with 1007a04f1aaa5bef41fd7b10f62f52e09df3fd01414f44b107bb3ca6302715b9 not found: ID does not exist" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.293920 4852 scope.go:117] "RemoveContainer" containerID="6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.294278 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: E1201 20:25:00.295050 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951\": container with ID starting with 6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951 not found: ID does not exist" containerID="6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.295106 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951"} err="failed to get container status \"6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951\": rpc error: code = NotFound desc = could not find container \"6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951\": container with ID starting with 6d150ddf12e1f5b4c46cd16428b454bb691a2f0f3239d03e759b6eb994c33951 not found: ID does not exist" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.297596 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.309569 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.350612 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb04bd51-ad81-43a6-a502-d22c6df69f26" path="/var/lib/kubelet/pods/bb04bd51-ad81-43a6-a502-d22c6df69f26/volumes" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.351355 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86" path="/var/lib/kubelet/pods/f5f2cdd8-c9d2-47a8-b54f-adb30b5e3a86/volumes" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.387626 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.491784 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.492736 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc785433-b882-49cf-a19e-36f92751e79c-logs\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " 
pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.492877 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llgxk\" (UniqueName: \"kubernetes.io/projected/dc785433-b882-49cf-a19e-36f92751e79c-kube-api-access-llgxk\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.493099 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-config-data\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.595628 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.597164 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc785433-b882-49cf-a19e-36f92751e79c-logs\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.597845 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc785433-b882-49cf-a19e-36f92751e79c-logs\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.598051 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llgxk\" (UniqueName: \"kubernetes.io/projected/dc785433-b882-49cf-a19e-36f92751e79c-kube-api-access-llgxk\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.598839 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-config-data\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.601689 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.605321 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-config-data\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.617160 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llgxk\" (UniqueName: \"kubernetes.io/projected/dc785433-b882-49cf-a19e-36f92751e79c-kube-api-access-llgxk\") pod \"nova-api-0\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " pod="openstack/nova-api-0" Dec 01 
20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.625379 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:00 crc kubenswrapper[4852]: I1201 20:25:00.717254 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 01 20:25:01 crc kubenswrapper[4852]: I1201 20:25:01.169413 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:01 crc kubenswrapper[4852]: I1201 20:25:01.234697 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc785433-b882-49cf-a19e-36f92751e79c","Type":"ContainerStarted","Data":"b0690cb42300fadb18353e8e66bae3f89bca78ae4a6451840d1b35a3ba5f16b4"} Dec 01 20:25:01 crc kubenswrapper[4852]: I1201 20:25:01.237860 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c07a26c9-d5fd-47c7-a98b-c2753e892efc","Type":"ContainerStarted","Data":"3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e"} Dec 01 20:25:01 crc kubenswrapper[4852]: I1201 20:25:01.237916 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c07a26c9-d5fd-47c7-a98b-c2753e892efc","Type":"ContainerStarted","Data":"ed4b26e1402536a0abb71b9100a85691eb36e51b22d4dd35424645db67b1d077"} Dec 01 20:25:01 crc kubenswrapper[4852]: I1201 20:25:01.260310 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.260286351 podStartE2EDuration="2.260286351s" podCreationTimestamp="2025-12-01 20:24:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:25:01.2579999 +0000 UTC m=+1221.185081327" watchObservedRunningTime="2025-12-01 20:25:01.260286351 +0000 UTC m=+1221.187367768" Dec 01 20:25:02 crc kubenswrapper[4852]: I1201 20:25:02.255993 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc785433-b882-49cf-a19e-36f92751e79c","Type":"ContainerStarted","Data":"10040f12a70496222daa324c47e668d24b6fe41d75146052bc9541c4ce7ccb58"} Dec 01 20:25:02 crc kubenswrapper[4852]: I1201 20:25:02.256441 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc785433-b882-49cf-a19e-36f92751e79c","Type":"ContainerStarted","Data":"1cbc33d327c5a39e4e3435fc5e270c1b38605e186e2792a14777047c9da93a47"} Dec 01 20:25:04 crc kubenswrapper[4852]: I1201 20:25:04.696786 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 01 20:25:09 crc kubenswrapper[4852]: I1201 20:25:09.696694 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 01 20:25:09 crc kubenswrapper[4852]: I1201 20:25:09.738112 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 01 20:25:09 crc kubenswrapper[4852]: I1201 20:25:09.778360 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=9.778326911 podStartE2EDuration="9.778326911s" podCreationTimestamp="2025-12-01 20:25:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:25:02.2885119 +0000 UTC m=+1222.215593317" watchObservedRunningTime="2025-12-01 20:25:09.778326911 
+0000 UTC m=+1229.705408368" Dec 01 20:25:10 crc kubenswrapper[4852]: I1201 20:25:10.450509 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 01 20:25:10 crc kubenswrapper[4852]: I1201 20:25:10.626437 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 20:25:10 crc kubenswrapper[4852]: I1201 20:25:10.626848 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 20:25:11 crc kubenswrapper[4852]: I1201 20:25:11.708867 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="dc785433-b882-49cf-a19e-36f92751e79c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 20:25:11 crc kubenswrapper[4852]: I1201 20:25:11.708889 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="dc785433-b882-49cf-a19e-36f92751e79c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.047443 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.053054 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.192831 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xt54l\" (UniqueName: \"kubernetes.io/projected/7da3536d-ea64-4b39-a315-586c31b8bfb3-kube-api-access-xt54l\") pod \"7da3536d-ea64-4b39-a315-586c31b8bfb3\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.192920 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-config-data\") pod \"7da3536d-ea64-4b39-a315-586c31b8bfb3\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.193140 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-combined-ca-bundle\") pod \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.193209 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-combined-ca-bundle\") pod \"7da3536d-ea64-4b39-a315-586c31b8bfb3\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.193246 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4xnk\" (UniqueName: \"kubernetes.io/projected/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-kube-api-access-t4xnk\") pod \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.193380 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7da3536d-ea64-4b39-a315-586c31b8bfb3-logs\") pod \"7da3536d-ea64-4b39-a315-586c31b8bfb3\" (UID: \"7da3536d-ea64-4b39-a315-586c31b8bfb3\") " Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.193483 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-config-data\") pod \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\" (UID: \"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d\") " Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.201326 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7da3536d-ea64-4b39-a315-586c31b8bfb3-kube-api-access-xt54l" (OuterVolumeSpecName: "kube-api-access-xt54l") pod "7da3536d-ea64-4b39-a315-586c31b8bfb3" (UID: "7da3536d-ea64-4b39-a315-586c31b8bfb3"). InnerVolumeSpecName "kube-api-access-xt54l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.205359 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-kube-api-access-t4xnk" (OuterVolumeSpecName: "kube-api-access-t4xnk") pod "6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d" (UID: "6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d"). InnerVolumeSpecName "kube-api-access-t4xnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.205777 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7da3536d-ea64-4b39-a315-586c31b8bfb3-logs" (OuterVolumeSpecName: "logs") pod "7da3536d-ea64-4b39-a315-586c31b8bfb3" (UID: "7da3536d-ea64-4b39-a315-586c31b8bfb3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.229491 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7da3536d-ea64-4b39-a315-586c31b8bfb3" (UID: "7da3536d-ea64-4b39-a315-586c31b8bfb3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.230880 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-config-data" (OuterVolumeSpecName: "config-data") pod "7da3536d-ea64-4b39-a315-586c31b8bfb3" (UID: "7da3536d-ea64-4b39-a315-586c31b8bfb3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.245054 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-config-data" (OuterVolumeSpecName: "config-data") pod "6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d" (UID: "6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.266021 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d" (UID: "6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.295856 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xt54l\" (UniqueName: \"kubernetes.io/projected/7da3536d-ea64-4b39-a315-586c31b8bfb3-kube-api-access-xt54l\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.296485 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.296590 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.296674 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7da3536d-ea64-4b39-a315-586c31b8bfb3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.296947 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4xnk\" (UniqueName: \"kubernetes.io/projected/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-kube-api-access-t4xnk\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.297025 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7da3536d-ea64-4b39-a315-586c31b8bfb3-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.297099 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.427899 4852 generic.go:334] "Generic (PLEG): container finished" podID="7da3536d-ea64-4b39-a315-586c31b8bfb3" containerID="bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5" exitCode=137 Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.427988 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7da3536d-ea64-4b39-a315-586c31b8bfb3","Type":"ContainerDied","Data":"bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5"} Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.428025 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7da3536d-ea64-4b39-a315-586c31b8bfb3","Type":"ContainerDied","Data":"ed41723220de4e250ba28b1115bfcded0e57e6849c074e46311d7a19acc4039f"} Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.429395 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.431549 4852 scope.go:117] "RemoveContainer" containerID="bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.435964 4852 generic.go:334] "Generic (PLEG): container finished" podID="6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d" containerID="6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec" exitCode=137 Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.436025 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d","Type":"ContainerDied","Data":"6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec"} Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.436063 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.436069 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d","Type":"ContainerDied","Data":"c7aa9dee549969ef0a45c5b1a417287c8a4fb8a1dc7d59e901a4421252c86a1a"} Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.488564 4852 scope.go:117] "RemoveContainer" containerID="78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.503475 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.512005 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.520701 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.529707 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.532149 4852 scope.go:117] "RemoveContainer" containerID="bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5" Dec 01 20:25:17 crc kubenswrapper[4852]: E1201 20:25:17.533061 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5\": container with ID starting with bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5 not found: ID does not exist" containerID="bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.533129 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5"} err="failed to get container status \"bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5\": rpc error: code = NotFound desc = could not find container \"bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5\": container with ID starting with bbebdf0ff9b3e42bc6b0bb0ef7baa5f24cdbb9d988deaa4b1ca535b66235f2e5 not found: ID does not exist" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.533178 4852 scope.go:117] "RemoveContainer" containerID="78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a" Dec 01 20:25:17 crc kubenswrapper[4852]: E1201 
20:25:17.533818 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a\": container with ID starting with 78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a not found: ID does not exist" containerID="78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.533877 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a"} err="failed to get container status \"78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a\": rpc error: code = NotFound desc = could not find container \"78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a\": container with ID starting with 78566e647ba1a7900daad0184d007478ab6f54c5dfca519ddd54b01453d09e2a not found: ID does not exist" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.533914 4852 scope.go:117] "RemoveContainer" containerID="6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.578799 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.579291 4852 scope.go:117] "RemoveContainer" containerID="6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec" Dec 01 20:25:17 crc kubenswrapper[4852]: E1201 20:25:17.580048 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7da3536d-ea64-4b39-a315-586c31b8bfb3" containerName="nova-metadata-log" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.580089 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7da3536d-ea64-4b39-a315-586c31b8bfb3" containerName="nova-metadata-log" Dec 01 20:25:17 crc kubenswrapper[4852]: E1201 20:25:17.580213 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.580236 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 20:25:17 crc kubenswrapper[4852]: E1201 20:25:17.580276 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7da3536d-ea64-4b39-a315-586c31b8bfb3" containerName="nova-metadata-metadata" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.580284 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7da3536d-ea64-4b39-a315-586c31b8bfb3" containerName="nova-metadata-metadata" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.581000 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.581068 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="7da3536d-ea64-4b39-a315-586c31b8bfb3" containerName="nova-metadata-metadata" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.581087 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="7da3536d-ea64-4b39-a315-586c31b8bfb3" containerName="nova-metadata-log" Dec 01 20:25:17 crc kubenswrapper[4852]: E1201 20:25:17.582015 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec\": container with ID starting with 6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec not found: ID does not exist" containerID="6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.582068 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec"} err="failed to get container status \"6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec\": rpc error: code = NotFound desc = could not find container \"6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec\": container with ID starting with 6aa524a0b9e9e26ee58b43d8bf2c8b2ec26b4417508427938bc109b4e2bb35ec not found: ID does not exist" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.583473 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.590391 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.590754 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.610121 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.611882 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.614510 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.614718 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.615932 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.625904 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.639231 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.708189 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce26c0a3-a403-4862-9464-16f762fe0188-logs\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.708266 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.708314 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.708349 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-config-data\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.708385 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbfr5\" (UniqueName: \"kubernetes.io/projected/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-kube-api-access-rbfr5\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.708491 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.708522 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-th72r\" (UniqueName: \"kubernetes.io/projected/ce26c0a3-a403-4862-9464-16f762fe0188-kube-api-access-th72r\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.708612 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.708768 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.708868 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.811299 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.811360 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.811396 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-config-data\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.811432 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbfr5\" (UniqueName: \"kubernetes.io/projected/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-kube-api-access-rbfr5\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.811568 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.811600 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-th72r\" (UniqueName: \"kubernetes.io/projected/ce26c0a3-a403-4862-9464-16f762fe0188-kube-api-access-th72r\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.811688 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.811729 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.811761 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.811874 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce26c0a3-a403-4862-9464-16f762fe0188-logs\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.813881 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce26c0a3-a403-4862-9464-16f762fe0188-logs\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 
crc kubenswrapper[4852]: I1201 20:25:17.818224 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.820024 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.820528 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.820896 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.821138 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.821194 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.821438 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-config-data\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.837032 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-th72r\" (UniqueName: \"kubernetes.io/projected/ce26c0a3-a403-4862-9464-16f762fe0188-kube-api-access-th72r\") pod \"nova-metadata-0\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.838015 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbfr5\" (UniqueName: \"kubernetes.io/projected/fb428793-fd4f-4e29-a7e1-7c5b539d01d7-kube-api-access-rbfr5\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb428793-fd4f-4e29-a7e1-7c5b539d01d7\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.911545 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 20:25:17 crc kubenswrapper[4852]: I1201 20:25:17.933361 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:18 crc kubenswrapper[4852]: I1201 20:25:18.338805 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d" path="/var/lib/kubelet/pods/6f8f8f3d-07cd-4720-8d0f-af58a7fb9f7d/volumes" Dec 01 20:25:18 crc kubenswrapper[4852]: I1201 20:25:18.340127 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7da3536d-ea64-4b39-a315-586c31b8bfb3" path="/var/lib/kubelet/pods/7da3536d-ea64-4b39-a315-586c31b8bfb3/volumes" Dec 01 20:25:18 crc kubenswrapper[4852]: I1201 20:25:18.409296 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 20:25:18 crc kubenswrapper[4852]: I1201 20:25:18.449913 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce26c0a3-a403-4862-9464-16f762fe0188","Type":"ContainerStarted","Data":"87be8a001e37e1ccfe6b88e236b4764b57569df97a401ade7dec9883d44c3aab"} Dec 01 20:25:18 crc kubenswrapper[4852]: I1201 20:25:18.494976 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 20:25:18 crc kubenswrapper[4852]: W1201 20:25:18.495314 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb428793_fd4f_4e29_a7e1_7c5b539d01d7.slice/crio-76c5248d48040ab88f59e1760d5761f41545a3345b95c56bd598da6cb1512a98 WatchSource:0}: Error finding container 76c5248d48040ab88f59e1760d5761f41545a3345b95c56bd598da6cb1512a98: Status 404 returned error can't find the container with id 76c5248d48040ab88f59e1760d5761f41545a3345b95c56bd598da6cb1512a98 Dec 01 20:25:19 crc kubenswrapper[4852]: I1201 20:25:19.469924 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce26c0a3-a403-4862-9464-16f762fe0188","Type":"ContainerStarted","Data":"6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef"} Dec 01 20:25:19 crc kubenswrapper[4852]: I1201 20:25:19.470512 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce26c0a3-a403-4862-9464-16f762fe0188","Type":"ContainerStarted","Data":"52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4"} Dec 01 20:25:19 crc kubenswrapper[4852]: I1201 20:25:19.472023 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fb428793-fd4f-4e29-a7e1-7c5b539d01d7","Type":"ContainerStarted","Data":"08acc96dab9dd58cd06b2c3c9227e647c8d769331596e7ef8cfc3db29b04a1de"} Dec 01 20:25:19 crc kubenswrapper[4852]: I1201 20:25:19.472097 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fb428793-fd4f-4e29-a7e1-7c5b539d01d7","Type":"ContainerStarted","Data":"76c5248d48040ab88f59e1760d5761f41545a3345b95c56bd598da6cb1512a98"} Dec 01 20:25:19 crc kubenswrapper[4852]: I1201 20:25:19.512263 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.512237162 podStartE2EDuration="2.512237162s" podCreationTimestamp="2025-12-01 20:25:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 
20:25:19.493571379 +0000 UTC m=+1239.420652806" watchObservedRunningTime="2025-12-01 20:25:19.512237162 +0000 UTC m=+1239.439318599" Dec 01 20:25:19 crc kubenswrapper[4852]: I1201 20:25:19.516919 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.516900029 podStartE2EDuration="2.516900029s" podCreationTimestamp="2025-12-01 20:25:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:25:19.51629087 +0000 UTC m=+1239.443372287" watchObservedRunningTime="2025-12-01 20:25:19.516900029 +0000 UTC m=+1239.443981446" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.630624 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.631166 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.631560 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.631667 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.633898 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.634294 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.858194 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc646c8f9-bbmgk"] Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.863615 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.898723 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc646c8f9-bbmgk"] Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.988064 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rscsr\" (UniqueName: \"kubernetes.io/projected/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-kube-api-access-rscsr\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.988668 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.988715 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-nb\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.988755 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-swift-storage-0\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.988969 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-config\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:20 crc kubenswrapper[4852]: I1201 20:25:20.989008 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-svc\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.091208 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.091260 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-nb\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.091292 4852 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-swift-storage-0\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.091351 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-config\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.091383 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-svc\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.091439 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rscsr\" (UniqueName: \"kubernetes.io/projected/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-kube-api-access-rscsr\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.094000 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-swift-storage-0\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.094260 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-config\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.094306 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-nb\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.094273 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.094854 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-svc\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.122023 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rscsr\" (UniqueName: 
\"kubernetes.io/projected/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-kube-api-access-rscsr\") pod \"dnsmasq-dns-6bc646c8f9-bbmgk\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.203497 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:21 crc kubenswrapper[4852]: I1201 20:25:21.715479 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc646c8f9-bbmgk"] Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.504298 4852 generic.go:334] "Generic (PLEG): container finished" podID="29aaf433-d1a1-4404-9cf7-af6d6982f0fa" containerID="fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47" exitCode=0 Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.504368 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" event={"ID":"29aaf433-d1a1-4404-9cf7-af6d6982f0fa","Type":"ContainerDied","Data":"fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47"} Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.504764 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" event={"ID":"29aaf433-d1a1-4404-9cf7-af6d6982f0fa","Type":"ContainerStarted","Data":"c9cbd7072788054eba22c1a2eabf5b9ed889c0f95fff85bd001c8c1350522e11"} Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.733499 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.734146 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="ceilometer-central-agent" containerID="cri-o://fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18" gracePeriod=30 Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.734274 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="proxy-httpd" containerID="cri-o://8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2" gracePeriod=30 Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.734326 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="sg-core" containerID="cri-o://c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58" gracePeriod=30 Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.734371 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="ceilometer-notification-agent" containerID="cri-o://37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873" gracePeriod=30 Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.911913 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.911970 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 20:25:22 crc kubenswrapper[4852]: I1201 20:25:22.935208 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 
20:25:23.255385 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.518854 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" event={"ID":"29aaf433-d1a1-4404-9cf7-af6d6982f0fa","Type":"ContainerStarted","Data":"2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf"} Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.519010 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.530062 4852 generic.go:334] "Generic (PLEG): container finished" podID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerID="8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2" exitCode=0 Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.530104 4852 generic.go:334] "Generic (PLEG): container finished" podID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerID="c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58" exitCode=2 Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.530115 4852 generic.go:334] "Generic (PLEG): container finished" podID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerID="fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18" exitCode=0 Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.530108 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8c4d07b6-c67e-4233-a005-3c2a7c54805a","Type":"ContainerDied","Data":"8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2"} Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.530161 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8c4d07b6-c67e-4233-a005-3c2a7c54805a","Type":"ContainerDied","Data":"c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58"} Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.530177 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8c4d07b6-c67e-4233-a005-3c2a7c54805a","Type":"ContainerDied","Data":"fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18"} Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.530390 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dc785433-b882-49cf-a19e-36f92751e79c" containerName="nova-api-log" containerID="cri-o://1cbc33d327c5a39e4e3435fc5e270c1b38605e186e2792a14777047c9da93a47" gracePeriod=30 Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.530421 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dc785433-b882-49cf-a19e-36f92751e79c" containerName="nova-api-api" containerID="cri-o://10040f12a70496222daa324c47e668d24b6fe41d75146052bc9541c4ce7ccb58" gracePeriod=30 Dec 01 20:25:23 crc kubenswrapper[4852]: I1201 20:25:23.554681 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" podStartSLOduration=3.55466189 podStartE2EDuration="3.55466189s" podCreationTimestamp="2025-12-01 20:25:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:25:23.547622677 +0000 UTC m=+1243.474704104" watchObservedRunningTime="2025-12-01 20:25:23.55466189 +0000 UTC m=+1243.481743307" Dec 01 20:25:24 crc kubenswrapper[4852]: I1201 
20:25:24.541303 4852 generic.go:334] "Generic (PLEG): container finished" podID="dc785433-b882-49cf-a19e-36f92751e79c" containerID="1cbc33d327c5a39e4e3435fc5e270c1b38605e186e2792a14777047c9da93a47" exitCode=143 Dec 01 20:25:24 crc kubenswrapper[4852]: I1201 20:25:24.541408 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc785433-b882-49cf-a19e-36f92751e79c","Type":"ContainerDied","Data":"1cbc33d327c5a39e4e3435fc5e270c1b38605e186e2792a14777047c9da93a47"} Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.157034 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.315500 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-run-httpd\") pod \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.315570 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-scripts\") pod \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.315824 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-config-data\") pod \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.315945 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fvvr\" (UniqueName: \"kubernetes.io/projected/8c4d07b6-c67e-4233-a005-3c2a7c54805a-kube-api-access-4fvvr\") pod \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.316319 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8c4d07b6-c67e-4233-a005-3c2a7c54805a" (UID: "8c4d07b6-c67e-4233-a005-3c2a7c54805a"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.316749 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-combined-ca-bundle\") pod \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.316806 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-log-httpd\") pod \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.316832 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-sg-core-conf-yaml\") pod \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.316872 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-ceilometer-tls-certs\") pod \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\" (UID: \"8c4d07b6-c67e-4233-a005-3c2a7c54805a\") " Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.317298 4852 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.317516 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8c4d07b6-c67e-4233-a005-3c2a7c54805a" (UID: "8c4d07b6-c67e-4233-a005-3c2a7c54805a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.323326 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-scripts" (OuterVolumeSpecName: "scripts") pod "8c4d07b6-c67e-4233-a005-3c2a7c54805a" (UID: "8c4d07b6-c67e-4233-a005-3c2a7c54805a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.325300 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c4d07b6-c67e-4233-a005-3c2a7c54805a-kube-api-access-4fvvr" (OuterVolumeSpecName: "kube-api-access-4fvvr") pod "8c4d07b6-c67e-4233-a005-3c2a7c54805a" (UID: "8c4d07b6-c67e-4233-a005-3c2a7c54805a"). InnerVolumeSpecName "kube-api-access-4fvvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.376877 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8c4d07b6-c67e-4233-a005-3c2a7c54805a" (UID: "8c4d07b6-c67e-4233-a005-3c2a7c54805a"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.385844 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "8c4d07b6-c67e-4233-a005-3c2a7c54805a" (UID: "8c4d07b6-c67e-4233-a005-3c2a7c54805a"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.419832 4852 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.420287 4852 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.420432 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.420580 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fvvr\" (UniqueName: \"kubernetes.io/projected/8c4d07b6-c67e-4233-a005-3c2a7c54805a-kube-api-access-4fvvr\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.420655 4852 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c4d07b6-c67e-4233-a005-3c2a7c54805a-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.421543 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c4d07b6-c67e-4233-a005-3c2a7c54805a" (UID: "8c4d07b6-c67e-4233-a005-3c2a7c54805a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.453900 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-config-data" (OuterVolumeSpecName: "config-data") pod "8c4d07b6-c67e-4233-a005-3c2a7c54805a" (UID: "8c4d07b6-c67e-4233-a005-3c2a7c54805a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.523066 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.523106 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c4d07b6-c67e-4233-a005-3c2a7c54805a-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.564572 4852 generic.go:334] "Generic (PLEG): container finished" podID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerID="37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873" exitCode=0 Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.564646 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8c4d07b6-c67e-4233-a005-3c2a7c54805a","Type":"ContainerDied","Data":"37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873"} Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.564710 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.564729 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8c4d07b6-c67e-4233-a005-3c2a7c54805a","Type":"ContainerDied","Data":"a2ed2adce1722ea89b65575982ea8bbb8bfb147c782df0a9484f02271868b0ea"} Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.564758 4852 scope.go:117] "RemoveContainer" containerID="8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.599347 4852 scope.go:117] "RemoveContainer" containerID="c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.606731 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.617829 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.626776 4852 scope.go:117] "RemoveContainer" containerID="37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.645600 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:25:26 crc kubenswrapper[4852]: E1201 20:25:26.646022 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="proxy-httpd" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.646041 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="proxy-httpd" Dec 01 20:25:26 crc kubenswrapper[4852]: E1201 20:25:26.646052 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="ceilometer-central-agent" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.646060 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="ceilometer-central-agent" Dec 01 20:25:26 crc kubenswrapper[4852]: E1201 20:25:26.646084 4852 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="sg-core" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.646092 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="sg-core" Dec 01 20:25:26 crc kubenswrapper[4852]: E1201 20:25:26.646137 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="ceilometer-notification-agent" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.646144 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="ceilometer-notification-agent" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.646320 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="sg-core" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.646336 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="proxy-httpd" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.646343 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="ceilometer-notification-agent" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.646361 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" containerName="ceilometer-central-agent" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.648142 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.650991 4852 scope.go:117] "RemoveContainer" containerID="fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.651724 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.651731 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.651790 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.657015 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.686741 4852 scope.go:117] "RemoveContainer" containerID="8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2" Dec 01 20:25:26 crc kubenswrapper[4852]: E1201 20:25:26.687698 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2\": container with ID starting with 8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2 not found: ID does not exist" containerID="8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.687786 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2"} err="failed to get container status \"8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2\": rpc error: code = NotFound desc = could not find container 
\"8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2\": container with ID starting with 8f9e4fa8bc20a5e5cf1122982bdc8866269b77583f99a98d0b24a4979397f8c2 not found: ID does not exist" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.687817 4852 scope.go:117] "RemoveContainer" containerID="c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58" Dec 01 20:25:26 crc kubenswrapper[4852]: E1201 20:25:26.692771 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58\": container with ID starting with c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58 not found: ID does not exist" containerID="c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.692920 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58"} err="failed to get container status \"c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58\": rpc error: code = NotFound desc = could not find container \"c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58\": container with ID starting with c15bb27ca72433e1eea4f096db7459d25d47e502cb9698111341f06a67dd3d58 not found: ID does not exist" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.693020 4852 scope.go:117] "RemoveContainer" containerID="37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873" Dec 01 20:25:26 crc kubenswrapper[4852]: E1201 20:25:26.694155 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873\": container with ID starting with 37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873 not found: ID does not exist" containerID="37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.694316 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873"} err="failed to get container status \"37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873\": rpc error: code = NotFound desc = could not find container \"37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873\": container with ID starting with 37956c7ecfcbb5d8e517e90d31ef589baea6ed6c5915259559fb6f915277f873 not found: ID does not exist" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.694364 4852 scope.go:117] "RemoveContainer" containerID="fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18" Dec 01 20:25:26 crc kubenswrapper[4852]: E1201 20:25:26.695890 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18\": container with ID starting with fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18 not found: ID does not exist" containerID="fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.695924 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18"} 
err="failed to get container status \"fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18\": rpc error: code = NotFound desc = could not find container \"fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18\": container with ID starting with fe8ea413d27750686122c262d0180ff7e61c976fbd1421a78cf4e4e29bf14e18 not found: ID does not exist" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.829791 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.830263 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/113952dd-818a-4d1d-a610-789c5cec4238-run-httpd\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.830619 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.830665 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/113952dd-818a-4d1d-a610-789c5cec4238-log-httpd\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.830734 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-config-data\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.830944 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2nf7\" (UniqueName: \"kubernetes.io/projected/113952dd-818a-4d1d-a610-789c5cec4238-kube-api-access-j2nf7\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.831167 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.831319 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-scripts\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.932874 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/113952dd-818a-4d1d-a610-789c5cec4238-run-httpd\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.932976 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.933004 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/113952dd-818a-4d1d-a610-789c5cec4238-log-httpd\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.933037 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-config-data\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.933072 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2nf7\" (UniqueName: \"kubernetes.io/projected/113952dd-818a-4d1d-a610-789c5cec4238-kube-api-access-j2nf7\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.933118 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.933166 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-scripts\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.933213 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.933474 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/113952dd-818a-4d1d-a610-789c5cec4238-run-httpd\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.934232 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/113952dd-818a-4d1d-a610-789c5cec4238-log-httpd\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.938639 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.939025 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-config-data\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.943515 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-scripts\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.945208 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.945985 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/113952dd-818a-4d1d-a610-789c5cec4238-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.951050 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2nf7\" (UniqueName: \"kubernetes.io/projected/113952dd-818a-4d1d-a610-789c5cec4238-kube-api-access-j2nf7\") pod \"ceilometer-0\" (UID: \"113952dd-818a-4d1d-a610-789c5cec4238\") " pod="openstack/ceilometer-0" Dec 01 20:25:26 crc kubenswrapper[4852]: I1201 20:25:26.970123 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 20:25:27 crc kubenswrapper[4852]: W1201 20:25:27.466311 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod113952dd_818a_4d1d_a610_789c5cec4238.slice/crio-4369148d8488545a8e4707e6662688578c43acd3df88e0e989c2cbea9683c8f7 WatchSource:0}: Error finding container 4369148d8488545a8e4707e6662688578c43acd3df88e0e989c2cbea9683c8f7: Status 404 returned error can't find the container with id 4369148d8488545a8e4707e6662688578c43acd3df88e0e989c2cbea9683c8f7 Dec 01 20:25:27 crc kubenswrapper[4852]: I1201 20:25:27.471132 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 20:25:27 crc kubenswrapper[4852]: I1201 20:25:27.584875 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"113952dd-818a-4d1d-a610-789c5cec4238","Type":"ContainerStarted","Data":"4369148d8488545a8e4707e6662688578c43acd3df88e0e989c2cbea9683c8f7"} Dec 01 20:25:27 crc kubenswrapper[4852]: I1201 20:25:27.912664 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 20:25:27 crc kubenswrapper[4852]: I1201 20:25:27.912725 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 20:25:27 crc kubenswrapper[4852]: I1201 20:25:27.934274 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:27 crc kubenswrapper[4852]: I1201 20:25:27.972857 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.333052 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c4d07b6-c67e-4233-a005-3c2a7c54805a" path="/var/lib/kubelet/pods/8c4d07b6-c67e-4233-a005-3c2a7c54805a/volumes" Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.601391 4852 generic.go:334] "Generic (PLEG): container finished" podID="dc785433-b882-49cf-a19e-36f92751e79c" containerID="10040f12a70496222daa324c47e668d24b6fe41d75146052bc9541c4ce7ccb58" exitCode=0 Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.602367 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc785433-b882-49cf-a19e-36f92751e79c","Type":"ContainerDied","Data":"10040f12a70496222daa324c47e668d24b6fe41d75146052bc9541c4ce7ccb58"} Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.620236 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.928731 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.928746 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.965098 4852 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-lvjr4"] Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.966555 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.969432 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.969631 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 01 20:25:28 crc kubenswrapper[4852]: I1201 20:25:28.978857 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-lvjr4"] Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.078672 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.078754 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-config-data\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.078860 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrp6n\" (UniqueName: \"kubernetes.io/projected/33037a1a-8d04-4fe2-bbe2-cba894655514-kube-api-access-qrp6n\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.078896 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-scripts\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.168184 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.180330 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrp6n\" (UniqueName: \"kubernetes.io/projected/33037a1a-8d04-4fe2-bbe2-cba894655514-kube-api-access-qrp6n\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.180410 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-scripts\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.180552 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.180575 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-config-data\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.194722 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-config-data\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.202981 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.205871 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-scripts\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.207634 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrp6n\" (UniqueName: \"kubernetes.io/projected/33037a1a-8d04-4fe2-bbe2-cba894655514-kube-api-access-qrp6n\") pod \"nova-cell1-cell-mapping-lvjr4\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.281938 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-combined-ca-bundle\") pod \"dc785433-b882-49cf-a19e-36f92751e79c\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 
20:25:29.282025 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llgxk\" (UniqueName: \"kubernetes.io/projected/dc785433-b882-49cf-a19e-36f92751e79c-kube-api-access-llgxk\") pod \"dc785433-b882-49cf-a19e-36f92751e79c\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.282090 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc785433-b882-49cf-a19e-36f92751e79c-logs\") pod \"dc785433-b882-49cf-a19e-36f92751e79c\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.282149 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-config-data\") pod \"dc785433-b882-49cf-a19e-36f92751e79c\" (UID: \"dc785433-b882-49cf-a19e-36f92751e79c\") " Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.283098 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc785433-b882-49cf-a19e-36f92751e79c-logs" (OuterVolumeSpecName: "logs") pod "dc785433-b882-49cf-a19e-36f92751e79c" (UID: "dc785433-b882-49cf-a19e-36f92751e79c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.295429 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc785433-b882-49cf-a19e-36f92751e79c-kube-api-access-llgxk" (OuterVolumeSpecName: "kube-api-access-llgxk") pod "dc785433-b882-49cf-a19e-36f92751e79c" (UID: "dc785433-b882-49cf-a19e-36f92751e79c"). InnerVolumeSpecName "kube-api-access-llgxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.301856 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.327118 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc785433-b882-49cf-a19e-36f92751e79c" (UID: "dc785433-b882-49cf-a19e-36f92751e79c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.331523 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-config-data" (OuterVolumeSpecName: "config-data") pod "dc785433-b882-49cf-a19e-36f92751e79c" (UID: "dc785433-b882-49cf-a19e-36f92751e79c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.385324 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.385362 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llgxk\" (UniqueName: \"kubernetes.io/projected/dc785433-b882-49cf-a19e-36f92751e79c-kube-api-access-llgxk\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.385379 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc785433-b882-49cf-a19e-36f92751e79c-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.385391 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc785433-b882-49cf-a19e-36f92751e79c-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.618222 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc785433-b882-49cf-a19e-36f92751e79c","Type":"ContainerDied","Data":"b0690cb42300fadb18353e8e66bae3f89bca78ae4a6451840d1b35a3ba5f16b4"} Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.618649 4852 scope.go:117] "RemoveContainer" containerID="10040f12a70496222daa324c47e668d24b6fe41d75146052bc9541c4ce7ccb58" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.618281 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.632666 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"113952dd-818a-4d1d-a610-789c5cec4238","Type":"ContainerStarted","Data":"2de3ae07bb70df58747b1adc82e29780167604d94c830220879c1dfb3d8f6c80"} Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.648491 4852 scope.go:117] "RemoveContainer" containerID="1cbc33d327c5a39e4e3435fc5e270c1b38605e186e2792a14777047c9da93a47" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.676007 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.685957 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.745362 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:29 crc kubenswrapper[4852]: E1201 20:25:29.747047 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc785433-b882-49cf-a19e-36f92751e79c" containerName="nova-api-api" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.747076 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc785433-b882-49cf-a19e-36f92751e79c" containerName="nova-api-api" Dec 01 20:25:29 crc kubenswrapper[4852]: E1201 20:25:29.747166 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc785433-b882-49cf-a19e-36f92751e79c" containerName="nova-api-log" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.747182 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc785433-b882-49cf-a19e-36f92751e79c" containerName="nova-api-log" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.754532 4852 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="dc785433-b882-49cf-a19e-36f92751e79c" containerName="nova-api-log" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.754669 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc785433-b882-49cf-a19e-36f92751e79c" containerName="nova-api-api" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.767421 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.770512 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.775918 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.776341 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.779309 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.796080 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-logs\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.796203 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-public-tls-certs\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.796228 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p9cz\" (UniqueName: \"kubernetes.io/projected/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-kube-api-access-9p9cz\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.796261 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.796349 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-config-data\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.796392 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.832990 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-lvjr4"] Dec 01 20:25:29 crc kubenswrapper[4852]: 
I1201 20:25:29.898986 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-public-tls-certs\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.899032 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p9cz\" (UniqueName: \"kubernetes.io/projected/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-kube-api-access-9p9cz\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.899107 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.902724 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-config-data\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.903766 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.903898 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-public-tls-certs\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.904220 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-logs\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.904327 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.904744 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-logs\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.907103 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-config-data\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.908929 4852 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:29 crc kubenswrapper[4852]: I1201 20:25:29.922475 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p9cz\" (UniqueName: \"kubernetes.io/projected/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-kube-api-access-9p9cz\") pod \"nova-api-0\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " pod="openstack/nova-api-0" Dec 01 20:25:30 crc kubenswrapper[4852]: I1201 20:25:30.118326 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:30 crc kubenswrapper[4852]: I1201 20:25:30.335573 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc785433-b882-49cf-a19e-36f92751e79c" path="/var/lib/kubelet/pods/dc785433-b882-49cf-a19e-36f92751e79c/volumes" Dec 01 20:25:30 crc kubenswrapper[4852]: I1201 20:25:30.421601 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:30 crc kubenswrapper[4852]: W1201 20:25:30.477248 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff27233_f4e2_46f3_9f02_a9cf7cd3e674.slice/crio-ed0e73303e69cc1e3a54d384ff4babae15f3f2384e2694d89d0622a7b14dc372 WatchSource:0}: Error finding container ed0e73303e69cc1e3a54d384ff4babae15f3f2384e2694d89d0622a7b14dc372: Status 404 returned error can't find the container with id ed0e73303e69cc1e3a54d384ff4babae15f3f2384e2694d89d0622a7b14dc372 Dec 01 20:25:30 crc kubenswrapper[4852]: I1201 20:25:30.653753 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lvjr4" event={"ID":"33037a1a-8d04-4fe2-bbe2-cba894655514","Type":"ContainerStarted","Data":"63bc5381838417996a2f71982308978172f3a57c81907a974f3246153fcaef90"} Dec 01 20:25:30 crc kubenswrapper[4852]: I1201 20:25:30.654217 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lvjr4" event={"ID":"33037a1a-8d04-4fe2-bbe2-cba894655514","Type":"ContainerStarted","Data":"59742873fd6f882f2797d8932235a2f752b127ffe5d4408ea536c4bb04f16757"} Dec 01 20:25:30 crc kubenswrapper[4852]: I1201 20:25:30.658728 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674","Type":"ContainerStarted","Data":"ed0e73303e69cc1e3a54d384ff4babae15f3f2384e2694d89d0622a7b14dc372"} Dec 01 20:25:30 crc kubenswrapper[4852]: I1201 20:25:30.681599 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-lvjr4" podStartSLOduration=2.681575843 podStartE2EDuration="2.681575843s" podCreationTimestamp="2025-12-01 20:25:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:25:30.67419938 +0000 UTC m=+1250.601280807" watchObservedRunningTime="2025-12-01 20:25:30.681575843 +0000 UTC m=+1250.608657260" Dec 01 20:25:31 crc kubenswrapper[4852]: I1201 20:25:31.207673 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:25:31 crc kubenswrapper[4852]: I1201 20:25:31.287170 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75df6cf455-xldr7"] Dec 01 20:25:31 crc 
kubenswrapper[4852]: I1201 20:25:31.287489 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" podUID="f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" containerName="dnsmasq-dns" containerID="cri-o://a53ec01fb098442e31e8010061eea15998b5773c119c97453efb5da80fe5e8b1" gracePeriod=10 Dec 01 20:25:31 crc kubenswrapper[4852]: I1201 20:25:31.682387 4852 generic.go:334] "Generic (PLEG): container finished" podID="f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" containerID="a53ec01fb098442e31e8010061eea15998b5773c119c97453efb5da80fe5e8b1" exitCode=0 Dec 01 20:25:31 crc kubenswrapper[4852]: I1201 20:25:31.682688 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" event={"ID":"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36","Type":"ContainerDied","Data":"a53ec01fb098442e31e8010061eea15998b5773c119c97453efb5da80fe5e8b1"} Dec 01 20:25:31 crc kubenswrapper[4852]: I1201 20:25:31.698246 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674","Type":"ContainerStarted","Data":"d5198d6188da6bdff4c012847680bfbaf16107ac50aa223850bca59a2d3cdc3e"} Dec 01 20:25:31 crc kubenswrapper[4852]: I1201 20:25:31.698324 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674","Type":"ContainerStarted","Data":"21461077add605d4eae4b83c5f3767c2f4ec8221967006aeb831eaa93365e2ea"} Dec 01 20:25:31 crc kubenswrapper[4852]: I1201 20:25:31.709229 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"113952dd-818a-4d1d-a610-789c5cec4238","Type":"ContainerStarted","Data":"4b9b52735aef382e602a3e320a4c987af80e81b71f350d9ead6422f2e110b224"} Dec 01 20:25:31 crc kubenswrapper[4852]: I1201 20:25:31.736704 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.736681098 podStartE2EDuration="2.736681098s" podCreationTimestamp="2025-12-01 20:25:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:25:31.729721628 +0000 UTC m=+1251.656803065" watchObservedRunningTime="2025-12-01 20:25:31.736681098 +0000 UTC m=+1251.663762515" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.382870 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.574472 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-svc\") pod \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.575089 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-config\") pod \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.575246 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz89k\" (UniqueName: \"kubernetes.io/projected/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-kube-api-access-zz89k\") pod \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.575278 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-nb\") pod \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.575345 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-swift-storage-0\") pod \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.575399 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-sb\") pod \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\" (UID: \"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36\") " Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.583230 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-kube-api-access-zz89k" (OuterVolumeSpecName: "kube-api-access-zz89k") pod "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" (UID: "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36"). InnerVolumeSpecName "kube-api-access-zz89k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.632792 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" (UID: "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.634477 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" (UID: "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.635239 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" (UID: "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.644294 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" (UID: "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.647329 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-config" (OuterVolumeSpecName: "config") pod "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" (UID: "f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.678330 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz89k\" (UniqueName: \"kubernetes.io/projected/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-kube-api-access-zz89k\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.678362 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.678370 4852 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.678379 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.678388 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.678396 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.719220 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"113952dd-818a-4d1d-a610-789c5cec4238","Type":"ContainerStarted","Data":"3f9e50630b3317044369329c432b27ba91d10ca0e616c7bafbc5e814da3e2354"} Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.721769 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.721768 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" event={"ID":"f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36","Type":"ContainerDied","Data":"ecee5814455bab16670e77fba436a20049604887c4a853f6ba4761e2b42a3d81"} Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.721838 4852 scope.go:117] "RemoveContainer" containerID="a53ec01fb098442e31e8010061eea15998b5773c119c97453efb5da80fe5e8b1" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.750920 4852 scope.go:117] "RemoveContainer" containerID="fbe3bf4c975f4aa68644891a094272236e06b166ea34de3fae2005726bd64096" Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.766299 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75df6cf455-xldr7"] Dec 01 20:25:32 crc kubenswrapper[4852]: I1201 20:25:32.775109 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75df6cf455-xldr7"] Dec 01 20:25:34 crc kubenswrapper[4852]: I1201 20:25:34.338379 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" path="/var/lib/kubelet/pods/f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36/volumes" Dec 01 20:25:34 crc kubenswrapper[4852]: I1201 20:25:34.757162 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"113952dd-818a-4d1d-a610-789c5cec4238","Type":"ContainerStarted","Data":"000d86fe0a3acdf9ef89afff79ca101edc1e978bea78d245c2daa768f8ff5094"} Dec 01 20:25:34 crc kubenswrapper[4852]: I1201 20:25:34.757741 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 20:25:34 crc kubenswrapper[4852]: I1201 20:25:34.792578 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.264169054 podStartE2EDuration="8.792555895s" podCreationTimestamp="2025-12-01 20:25:26 +0000 UTC" firstStartedPulling="2025-12-01 20:25:27.469325659 +0000 UTC m=+1247.396407086" lastFinishedPulling="2025-12-01 20:25:33.99771251 +0000 UTC m=+1253.924793927" observedRunningTime="2025-12-01 20:25:34.788556399 +0000 UTC m=+1254.715637816" watchObservedRunningTime="2025-12-01 20:25:34.792555895 +0000 UTC m=+1254.719637332" Dec 01 20:25:36 crc kubenswrapper[4852]: I1201 20:25:36.780481 4852 generic.go:334] "Generic (PLEG): container finished" podID="33037a1a-8d04-4fe2-bbe2-cba894655514" containerID="63bc5381838417996a2f71982308978172f3a57c81907a974f3246153fcaef90" exitCode=0 Dec 01 20:25:36 crc kubenswrapper[4852]: I1201 20:25:36.780696 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lvjr4" event={"ID":"33037a1a-8d04-4fe2-bbe2-cba894655514","Type":"ContainerDied","Data":"63bc5381838417996a2f71982308978172f3a57c81907a974f3246153fcaef90"} Dec 01 20:25:37 crc kubenswrapper[4852]: I1201 20:25:37.279275 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-75df6cf455-xldr7" podUID="f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.187:5353: i/o timeout" Dec 01 20:25:37 crc kubenswrapper[4852]: I1201 20:25:37.919723 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 20:25:37 crc kubenswrapper[4852]: I1201 20:25:37.930129 4852 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 20:25:37 crc kubenswrapper[4852]: I1201 20:25:37.931493 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.274542 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.338091 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-scripts\") pod \"33037a1a-8d04-4fe2-bbe2-cba894655514\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.338256 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-config-data\") pod \"33037a1a-8d04-4fe2-bbe2-cba894655514\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.338291 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrp6n\" (UniqueName: \"kubernetes.io/projected/33037a1a-8d04-4fe2-bbe2-cba894655514-kube-api-access-qrp6n\") pod \"33037a1a-8d04-4fe2-bbe2-cba894655514\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.338391 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-combined-ca-bundle\") pod \"33037a1a-8d04-4fe2-bbe2-cba894655514\" (UID: \"33037a1a-8d04-4fe2-bbe2-cba894655514\") " Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.347782 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33037a1a-8d04-4fe2-bbe2-cba894655514-kube-api-access-qrp6n" (OuterVolumeSpecName: "kube-api-access-qrp6n") pod "33037a1a-8d04-4fe2-bbe2-cba894655514" (UID: "33037a1a-8d04-4fe2-bbe2-cba894655514"). InnerVolumeSpecName "kube-api-access-qrp6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.347998 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-scripts" (OuterVolumeSpecName: "scripts") pod "33037a1a-8d04-4fe2-bbe2-cba894655514" (UID: "33037a1a-8d04-4fe2-bbe2-cba894655514"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.379520 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-config-data" (OuterVolumeSpecName: "config-data") pod "33037a1a-8d04-4fe2-bbe2-cba894655514" (UID: "33037a1a-8d04-4fe2-bbe2-cba894655514"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.380008 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33037a1a-8d04-4fe2-bbe2-cba894655514" (UID: "33037a1a-8d04-4fe2-bbe2-cba894655514"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.441295 4852 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.441345 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.441364 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrp6n\" (UniqueName: \"kubernetes.io/projected/33037a1a-8d04-4fe2-bbe2-cba894655514-kube-api-access-qrp6n\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.441379 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33037a1a-8d04-4fe2-bbe2-cba894655514-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.818908 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lvjr4" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.823832 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lvjr4" event={"ID":"33037a1a-8d04-4fe2-bbe2-cba894655514","Type":"ContainerDied","Data":"59742873fd6f882f2797d8932235a2f752b127ffe5d4408ea536c4bb04f16757"} Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.823909 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59742873fd6f882f2797d8932235a2f752b127ffe5d4408ea536c4bb04f16757" Dec 01 20:25:38 crc kubenswrapper[4852]: I1201 20:25:38.840720 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 20:25:39 crc kubenswrapper[4852]: I1201 20:25:39.040849 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:39 crc kubenswrapper[4852]: I1201 20:25:39.041179 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" containerName="nova-api-log" containerID="cri-o://21461077add605d4eae4b83c5f3767c2f4ec8221967006aeb831eaa93365e2ea" gracePeriod=30 Dec 01 20:25:39 crc kubenswrapper[4852]: I1201 20:25:39.041314 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" containerName="nova-api-api" containerID="cri-o://d5198d6188da6bdff4c012847680bfbaf16107ac50aa223850bca59a2d3cdc3e" gracePeriod=30 Dec 01 20:25:39 crc kubenswrapper[4852]: I1201 20:25:39.061301 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 20:25:39 crc kubenswrapper[4852]: I1201 20:25:39.062168 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c07a26c9-d5fd-47c7-a98b-c2753e892efc" containerName="nova-scheduler-scheduler" containerID="cri-o://3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e" gracePeriod=30 Dec 01 20:25:39 crc kubenswrapper[4852]: I1201 20:25:39.083385 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 20:25:39 crc kubenswrapper[4852]: E1201 
20:25:39.699904 4852 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 20:25:39 crc kubenswrapper[4852]: E1201 20:25:39.703106 4852 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 20:25:39 crc kubenswrapper[4852]: E1201 20:25:39.705044 4852 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 20:25:39 crc kubenswrapper[4852]: E1201 20:25:39.705130 4852 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c07a26c9-d5fd-47c7-a98b-c2753e892efc" containerName="nova-scheduler-scheduler" Dec 01 20:25:39 crc kubenswrapper[4852]: I1201 20:25:39.843254 4852 generic.go:334] "Generic (PLEG): container finished" podID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" containerID="d5198d6188da6bdff4c012847680bfbaf16107ac50aa223850bca59a2d3cdc3e" exitCode=0 Dec 01 20:25:39 crc kubenswrapper[4852]: I1201 20:25:39.843735 4852 generic.go:334] "Generic (PLEG): container finished" podID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" containerID="21461077add605d4eae4b83c5f3767c2f4ec8221967006aeb831eaa93365e2ea" exitCode=143 Dec 01 20:25:39 crc kubenswrapper[4852]: I1201 20:25:39.843746 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674","Type":"ContainerDied","Data":"d5198d6188da6bdff4c012847680bfbaf16107ac50aa223850bca59a2d3cdc3e"} Dec 01 20:25:39 crc kubenswrapper[4852]: I1201 20:25:39.843816 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674","Type":"ContainerDied","Data":"21461077add605d4eae4b83c5f3767c2f4ec8221967006aeb831eaa93365e2ea"} Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.131673 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.183240 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-logs\") pod \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.183472 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-config-data\") pod \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.183569 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-combined-ca-bundle\") pod \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.183634 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9p9cz\" (UniqueName: \"kubernetes.io/projected/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-kube-api-access-9p9cz\") pod \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.183738 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-public-tls-certs\") pod \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.183773 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-internal-tls-certs\") pod \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\" (UID: \"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674\") " Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.184637 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-logs" (OuterVolumeSpecName: "logs") pod "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" (UID: "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.192665 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-kube-api-access-9p9cz" (OuterVolumeSpecName: "kube-api-access-9p9cz") pod "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" (UID: "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674"). InnerVolumeSpecName "kube-api-access-9p9cz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.226708 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-config-data" (OuterVolumeSpecName: "config-data") pod "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" (UID: "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.227751 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" (UID: "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.259763 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" (UID: "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.263791 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" (UID: "7ff27233-f4e2-46f3-9f02-a9cf7cd3e674"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.289142 4852 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.289691 4852 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.289713 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.289736 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.289747 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.289759 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9p9cz\" (UniqueName: \"kubernetes.io/projected/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674-kube-api-access-9p9cz\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.854400 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7ff27233-f4e2-46f3-9f02-a9cf7cd3e674","Type":"ContainerDied","Data":"ed0e73303e69cc1e3a54d384ff4babae15f3f2384e2694d89d0622a7b14dc372"} Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.854848 4852 scope.go:117] "RemoveContainer" containerID="d5198d6188da6bdff4c012847680bfbaf16107ac50aa223850bca59a2d3cdc3e" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.854639 4852 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-log" containerID="cri-o://52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4" gracePeriod=30 Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.854717 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-metadata" containerID="cri-o://6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef" gracePeriod=30 Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.854442 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.889138 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.896811 4852 scope.go:117] "RemoveContainer" containerID="21461077add605d4eae4b83c5f3767c2f4ec8221967006aeb831eaa93365e2ea" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.909535 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.930964 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:40 crc kubenswrapper[4852]: E1201 20:25:40.931564 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" containerName="init" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.931589 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" containerName="init" Dec 01 20:25:40 crc kubenswrapper[4852]: E1201 20:25:40.931638 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" containerName="dnsmasq-dns" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.931647 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" containerName="dnsmasq-dns" Dec 01 20:25:40 crc kubenswrapper[4852]: E1201 20:25:40.931669 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33037a1a-8d04-4fe2-bbe2-cba894655514" containerName="nova-manage" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.931679 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="33037a1a-8d04-4fe2-bbe2-cba894655514" containerName="nova-manage" Dec 01 20:25:40 crc kubenswrapper[4852]: E1201 20:25:40.931691 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" containerName="nova-api-log" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.931700 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" containerName="nova-api-log" Dec 01 20:25:40 crc kubenswrapper[4852]: E1201 20:25:40.931723 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" containerName="nova-api-api" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.931732 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" containerName="nova-api-api" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.931965 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" 
containerName="nova-api-api" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.931992 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f17bc8cc-8d01-4b2d-92ea-2f92fc4f4b36" containerName="dnsmasq-dns" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.932006 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" containerName="nova-api-log" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.932017 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="33037a1a-8d04-4fe2-bbe2-cba894655514" containerName="nova-manage" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.933396 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.942228 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.942903 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.943202 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 01 20:25:40 crc kubenswrapper[4852]: I1201 20:25:40.943896 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.003683 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.003916 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzs8s\" (UniqueName: \"kubernetes.io/projected/623fc41b-9221-407d-a5d7-e59ce151725a-kube-api-access-pzs8s\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.003950 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-config-data\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.003974 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/623fc41b-9221-407d-a5d7-e59ce151725a-logs\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.004006 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-public-tls-certs\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.004466 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.106719 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.106823 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.106884 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzs8s\" (UniqueName: \"kubernetes.io/projected/623fc41b-9221-407d-a5d7-e59ce151725a-kube-api-access-pzs8s\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.106903 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-config-data\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.106925 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/623fc41b-9221-407d-a5d7-e59ce151725a-logs\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.106956 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-public-tls-certs\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.107417 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/623fc41b-9221-407d-a5d7-e59ce151725a-logs\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.114193 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-public-tls-certs\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.114231 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.114569 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-config-data\") 
pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.115752 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623fc41b-9221-407d-a5d7-e59ce151725a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.126577 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzs8s\" (UniqueName: \"kubernetes.io/projected/623fc41b-9221-407d-a5d7-e59ce151725a-kube-api-access-pzs8s\") pod \"nova-api-0\" (UID: \"623fc41b-9221-407d-a5d7-e59ce151725a\") " pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.258486 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.744126 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 20:25:41 crc kubenswrapper[4852]: W1201 20:25:41.747511 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod623fc41b_9221_407d_a5d7_e59ce151725a.slice/crio-8b224ee6bacf10ae6aa34bdc7bf4ca9f0b665d03ed70c12a6c098e471274628e WatchSource:0}: Error finding container 8b224ee6bacf10ae6aa34bdc7bf4ca9f0b665d03ed70c12a6c098e471274628e: Status 404 returned error can't find the container with id 8b224ee6bacf10ae6aa34bdc7bf4ca9f0b665d03ed70c12a6c098e471274628e Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.868429 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"623fc41b-9221-407d-a5d7-e59ce151725a","Type":"ContainerStarted","Data":"8b224ee6bacf10ae6aa34bdc7bf4ca9f0b665d03ed70c12a6c098e471274628e"} Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.871882 4852 generic.go:334] "Generic (PLEG): container finished" podID="ce26c0a3-a403-4862-9464-16f762fe0188" containerID="52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4" exitCode=143 Dec 01 20:25:41 crc kubenswrapper[4852]: I1201 20:25:41.871910 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce26c0a3-a403-4862-9464-16f762fe0188","Type":"ContainerDied","Data":"52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4"} Dec 01 20:25:42 crc kubenswrapper[4852]: E1201 20:25:42.120380 4852 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff27233_f4e2_46f3_9f02_a9cf7cd3e674.slice\": RecentStats: unable to find data in memory cache]" Dec 01 20:25:42 crc kubenswrapper[4852]: I1201 20:25:42.335570 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ff27233-f4e2-46f3-9f02-a9cf7cd3e674" path="/var/lib/kubelet/pods/7ff27233-f4e2-46f3-9f02-a9cf7cd3e674/volumes" Dec 01 20:25:42 crc kubenswrapper[4852]: I1201 20:25:42.885637 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"623fc41b-9221-407d-a5d7-e59ce151725a","Type":"ContainerStarted","Data":"e73e874ec576620bf7610a0c904142d68e41f2dd5d8677371ee2780248ee29d2"} Dec 01 20:25:42 crc kubenswrapper[4852]: I1201 20:25:42.885689 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"623fc41b-9221-407d-a5d7-e59ce151725a","Type":"ContainerStarted","Data":"a4d2af18001bb3cc50f4a1b857b8361d7cdd992cc3d75015a5e0778452df4488"} Dec 01 20:25:42 crc kubenswrapper[4852]: I1201 20:25:42.909119 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.9091035979999997 podStartE2EDuration="2.909103598s" podCreationTimestamp="2025-12-01 20:25:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:25:42.907384534 +0000 UTC m=+1262.834465961" watchObservedRunningTime="2025-12-01 20:25:42.909103598 +0000 UTC m=+1262.836185015" Dec 01 20:25:43 crc kubenswrapper[4852]: I1201 20:25:43.992913 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:52120->10.217.0.193:8775: read: connection reset by peer" Dec 01 20:25:43 crc kubenswrapper[4852]: I1201 20:25:43.992935 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:52104->10.217.0.193:8775: read: connection reset by peer" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.598485 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.607052 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.688944 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-nova-metadata-tls-certs\") pod \"ce26c0a3-a403-4862-9464-16f762fe0188\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.689007 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-config-data\") pod \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.689113 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-config-data\") pod \"ce26c0a3-a403-4862-9464-16f762fe0188\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.689154 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-combined-ca-bundle\") pod \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.689214 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-th72r\" (UniqueName: \"kubernetes.io/projected/ce26c0a3-a403-4862-9464-16f762fe0188-kube-api-access-th72r\") pod 
\"ce26c0a3-a403-4862-9464-16f762fe0188\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.689303 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrbjx\" (UniqueName: \"kubernetes.io/projected/c07a26c9-d5fd-47c7-a98b-c2753e892efc-kube-api-access-lrbjx\") pod \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\" (UID: \"c07a26c9-d5fd-47c7-a98b-c2753e892efc\") " Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.689345 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce26c0a3-a403-4862-9464-16f762fe0188-logs\") pod \"ce26c0a3-a403-4862-9464-16f762fe0188\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.689481 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-combined-ca-bundle\") pod \"ce26c0a3-a403-4862-9464-16f762fe0188\" (UID: \"ce26c0a3-a403-4862-9464-16f762fe0188\") " Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.697957 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce26c0a3-a403-4862-9464-16f762fe0188-logs" (OuterVolumeSpecName: "logs") pod "ce26c0a3-a403-4862-9464-16f762fe0188" (UID: "ce26c0a3-a403-4862-9464-16f762fe0188"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.702096 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c07a26c9-d5fd-47c7-a98b-c2753e892efc-kube-api-access-lrbjx" (OuterVolumeSpecName: "kube-api-access-lrbjx") pod "c07a26c9-d5fd-47c7-a98b-c2753e892efc" (UID: "c07a26c9-d5fd-47c7-a98b-c2753e892efc"). InnerVolumeSpecName "kube-api-access-lrbjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.705310 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce26c0a3-a403-4862-9464-16f762fe0188-kube-api-access-th72r" (OuterVolumeSpecName: "kube-api-access-th72r") pod "ce26c0a3-a403-4862-9464-16f762fe0188" (UID: "ce26c0a3-a403-4862-9464-16f762fe0188"). InnerVolumeSpecName "kube-api-access-th72r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.726105 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce26c0a3-a403-4862-9464-16f762fe0188" (UID: "ce26c0a3-a403-4862-9464-16f762fe0188"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.730857 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c07a26c9-d5fd-47c7-a98b-c2753e892efc" (UID: "c07a26c9-d5fd-47c7-a98b-c2753e892efc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.739207 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-config-data" (OuterVolumeSpecName: "config-data") pod "ce26c0a3-a403-4862-9464-16f762fe0188" (UID: "ce26c0a3-a403-4862-9464-16f762fe0188"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.747896 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-config-data" (OuterVolumeSpecName: "config-data") pod "c07a26c9-d5fd-47c7-a98b-c2753e892efc" (UID: "c07a26c9-d5fd-47c7-a98b-c2753e892efc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.781345 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "ce26c0a3-a403-4862-9464-16f762fe0188" (UID: "ce26c0a3-a403-4862-9464-16f762fe0188"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.792486 4852 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.792526 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.792574 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.792586 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c07a26c9-d5fd-47c7-a98b-c2753e892efc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.792598 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-th72r\" (UniqueName: \"kubernetes.io/projected/ce26c0a3-a403-4862-9464-16f762fe0188-kube-api-access-th72r\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.792608 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrbjx\" (UniqueName: \"kubernetes.io/projected/c07a26c9-d5fd-47c7-a98b-c2753e892efc-kube-api-access-lrbjx\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.792621 4852 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce26c0a3-a403-4862-9464-16f762fe0188-logs\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.792649 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce26c0a3-a403-4862-9464-16f762fe0188-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:25:44 crc 
kubenswrapper[4852]: I1201 20:25:44.909602 4852 generic.go:334] "Generic (PLEG): container finished" podID="c07a26c9-d5fd-47c7-a98b-c2753e892efc" containerID="3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e" exitCode=0 Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.909700 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c07a26c9-d5fd-47c7-a98b-c2753e892efc","Type":"ContainerDied","Data":"3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e"} Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.909748 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c07a26c9-d5fd-47c7-a98b-c2753e892efc","Type":"ContainerDied","Data":"ed4b26e1402536a0abb71b9100a85691eb36e51b22d4dd35424645db67b1d077"} Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.909794 4852 scope.go:117] "RemoveContainer" containerID="3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.910106 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.912917 4852 generic.go:334] "Generic (PLEG): container finished" podID="ce26c0a3-a403-4862-9464-16f762fe0188" containerID="6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef" exitCode=0 Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.912951 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce26c0a3-a403-4862-9464-16f762fe0188","Type":"ContainerDied","Data":"6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef"} Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.912972 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce26c0a3-a403-4862-9464-16f762fe0188","Type":"ContainerDied","Data":"87be8a001e37e1ccfe6b88e236b4764b57569df97a401ade7dec9883d44c3aab"} Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.913003 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.941943 4852 scope.go:117] "RemoveContainer" containerID="3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e" Dec 01 20:25:44 crc kubenswrapper[4852]: E1201 20:25:44.942691 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e\": container with ID starting with 3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e not found: ID does not exist" containerID="3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.942721 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e"} err="failed to get container status \"3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e\": rpc error: code = NotFound desc = could not find container \"3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e\": container with ID starting with 3a0ea852c9bb5335aab0f9d31f40aae3c61ca9d5f38dae86cad8017e6f0e891e not found: ID does not exist" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.942748 4852 scope.go:117] "RemoveContainer" containerID="6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef" Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.947179 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.960201 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 20:25:44 crc kubenswrapper[4852]: I1201 20:25:44.974629 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.015148 4852 scope.go:117] "RemoveContainer" containerID="52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4" Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.034343 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.052883 4852 scope.go:117] "RemoveContainer" containerID="6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef" Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.053049 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 20:25:45 crc kubenswrapper[4852]: E1201 20:25:45.053621 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef\": container with ID starting with 6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef not found: ID does not exist" containerID="6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef" Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.053678 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef"} err="failed to get container status \"6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef\": rpc error: code = NotFound desc = could not find container \"6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef\": container with ID starting with 
6babf0d80c38cd4a6a673c16ef62708f263b1423a37f03f249259b28e109f2ef not found: ID does not exist"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.053713 4852 scope.go:117] "RemoveContainer" containerID="52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4"
Dec 01 20:25:45 crc kubenswrapper[4852]: E1201 20:25:45.054126 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c07a26c9-d5fd-47c7-a98b-c2753e892efc" containerName="nova-scheduler-scheduler"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.054162 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c07a26c9-d5fd-47c7-a98b-c2753e892efc" containerName="nova-scheduler-scheduler"
Dec 01 20:25:45 crc kubenswrapper[4852]: E1201 20:25:45.054187 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-metadata"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.054196 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-metadata"
Dec 01 20:25:45 crc kubenswrapper[4852]: E1201 20:25:45.054236 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-log"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.054247 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-log"
Dec 01 20:25:45 crc kubenswrapper[4852]: E1201 20:25:45.054249 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4\": container with ID starting with 52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4 not found: ID does not exist" containerID="52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.054319 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4"} err="failed to get container status \"52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4\": rpc error: code = NotFound desc = could not find container \"52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4\": container with ID starting with 52b2082c9ac5bf6d7ac12497883531179a04f64e20f629448ce08437f18389d4 not found: ID does not exist"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.054571 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c07a26c9-d5fd-47c7-a98b-c2753e892efc" containerName="nova-scheduler-scheduler"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.054616 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-log"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.054632 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" containerName="nova-metadata-metadata"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.055958 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.059311 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.066785 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.071309 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.073646 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.074075 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.094917 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.104893 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b254248d-f4d2-454d-bc92-09e0d709a0b8-config-data\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.104948 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f04ccdb1-afe8-4f1c-b475-b10384993bdc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f04ccdb1-afe8-4f1c-b475-b10384993bdc\") " pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.104976 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b254248d-f4d2-454d-bc92-09e0d709a0b8-logs\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.105049 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b254248d-f4d2-454d-bc92-09e0d709a0b8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.105068 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f04ccdb1-afe8-4f1c-b475-b10384993bdc-config-data\") pod \"nova-scheduler-0\" (UID: \"f04ccdb1-afe8-4f1c-b475-b10384993bdc\") " pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.105135 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b254248d-f4d2-454d-bc92-09e0d709a0b8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.105158 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gmrj\" (UniqueName: \"kubernetes.io/projected/f04ccdb1-afe8-4f1c-b475-b10384993bdc-kube-api-access-6gmrj\") pod \"nova-scheduler-0\" (UID: \"f04ccdb1-afe8-4f1c-b475-b10384993bdc\") " pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.105177 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn8jj\" (UniqueName: \"kubernetes.io/projected/b254248d-f4d2-454d-bc92-09e0d709a0b8-kube-api-access-nn8jj\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.107664 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.206748 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f04ccdb1-afe8-4f1c-b475-b10384993bdc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f04ccdb1-afe8-4f1c-b475-b10384993bdc\") " pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.207301 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b254248d-f4d2-454d-bc92-09e0d709a0b8-logs\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.207507 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b254248d-f4d2-454d-bc92-09e0d709a0b8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.207652 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f04ccdb1-afe8-4f1c-b475-b10384993bdc-config-data\") pod \"nova-scheduler-0\" (UID: \"f04ccdb1-afe8-4f1c-b475-b10384993bdc\") " pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.207813 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b254248d-f4d2-454d-bc92-09e0d709a0b8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.207922 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gmrj\" (UniqueName: \"kubernetes.io/projected/f04ccdb1-afe8-4f1c-b475-b10384993bdc-kube-api-access-6gmrj\") pod \"nova-scheduler-0\" (UID: \"f04ccdb1-afe8-4f1c-b475-b10384993bdc\") " pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.208038 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn8jj\" (UniqueName: \"kubernetes.io/projected/b254248d-f4d2-454d-bc92-09e0d709a0b8-kube-api-access-nn8jj\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.208085 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b254248d-f4d2-454d-bc92-09e0d709a0b8-logs\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.208352 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b254248d-f4d2-454d-bc92-09e0d709a0b8-config-data\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.212080 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f04ccdb1-afe8-4f1c-b475-b10384993bdc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f04ccdb1-afe8-4f1c-b475-b10384993bdc\") " pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.213091 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f04ccdb1-afe8-4f1c-b475-b10384993bdc-config-data\") pod \"nova-scheduler-0\" (UID: \"f04ccdb1-afe8-4f1c-b475-b10384993bdc\") " pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.214925 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b254248d-f4d2-454d-bc92-09e0d709a0b8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.216004 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b254248d-f4d2-454d-bc92-09e0d709a0b8-config-data\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.216031 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b254248d-f4d2-454d-bc92-09e0d709a0b8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.227359 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gmrj\" (UniqueName: \"kubernetes.io/projected/f04ccdb1-afe8-4f1c-b475-b10384993bdc-kube-api-access-6gmrj\") pod \"nova-scheduler-0\" (UID: \"f04ccdb1-afe8-4f1c-b475-b10384993bdc\") " pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.231026 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn8jj\" (UniqueName: \"kubernetes.io/projected/b254248d-f4d2-454d-bc92-09e0d709a0b8-kube-api-access-nn8jj\") pod \"nova-metadata-0\" (UID: \"b254248d-f4d2-454d-bc92-09e0d709a0b8\") " pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.447150 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.455581 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.941140 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 20:25:45 crc kubenswrapper[4852]: W1201 20:25:45.945339 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf04ccdb1_afe8_4f1c_b475_b10384993bdc.slice/crio-787bb0a6597c74cc681a00faaf16c0795a64414423783bda83d183e533716e66 WatchSource:0}: Error finding container 787bb0a6597c74cc681a00faaf16c0795a64414423783bda83d183e533716e66: Status 404 returned error can't find the container with id 787bb0a6597c74cc681a00faaf16c0795a64414423783bda83d183e533716e66
Dec 01 20:25:45 crc kubenswrapper[4852]: W1201 20:25:45.948147 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb254248d_f4d2_454d_bc92_09e0d709a0b8.slice/crio-be1fb8267e22592190e3213edffd04fcc1240be23db764023e843b4508189751 WatchSource:0}: Error finding container be1fb8267e22592190e3213edffd04fcc1240be23db764023e843b4508189751: Status 404 returned error can't find the container with id be1fb8267e22592190e3213edffd04fcc1240be23db764023e843b4508189751
Dec 01 20:25:45 crc kubenswrapper[4852]: I1201 20:25:45.953223 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 20:25:46 crc kubenswrapper[4852]: I1201 20:25:46.336856 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c07a26c9-d5fd-47c7-a98b-c2753e892efc" path="/var/lib/kubelet/pods/c07a26c9-d5fd-47c7-a98b-c2753e892efc/volumes"
Dec 01 20:25:46 crc kubenswrapper[4852]: I1201 20:25:46.337717 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce26c0a3-a403-4862-9464-16f762fe0188" path="/var/lib/kubelet/pods/ce26c0a3-a403-4862-9464-16f762fe0188/volumes"
Dec 01 20:25:46 crc kubenswrapper[4852]: I1201 20:25:46.948397 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f04ccdb1-afe8-4f1c-b475-b10384993bdc","Type":"ContainerStarted","Data":"1478d7efa868ac42ad6d7703d858ec4b494f12023dc886816f5f10767cc0368f"}
Dec 01 20:25:46 crc kubenswrapper[4852]: I1201 20:25:46.948498 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f04ccdb1-afe8-4f1c-b475-b10384993bdc","Type":"ContainerStarted","Data":"787bb0a6597c74cc681a00faaf16c0795a64414423783bda83d183e533716e66"}
Dec 01 20:25:46 crc kubenswrapper[4852]: I1201 20:25:46.950746 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b254248d-f4d2-454d-bc92-09e0d709a0b8","Type":"ContainerStarted","Data":"63d87214cf9812f60591df41687611eb2a067920f45df6b8cc2dff8eddb272f9"}
Dec 01 20:25:46 crc kubenswrapper[4852]: I1201 20:25:46.950783 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b254248d-f4d2-454d-bc92-09e0d709a0b8","Type":"ContainerStarted","Data":"963d3b9b41f8520029cb2e99db12b45ff47b7522b615485b9c6be5594936041e"}
Dec 01 20:25:46 crc kubenswrapper[4852]: I1201 20:25:46.950799 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b254248d-f4d2-454d-bc92-09e0d709a0b8","Type":"ContainerStarted","Data":"be1fb8267e22592190e3213edffd04fcc1240be23db764023e843b4508189751"}
Dec 01 20:25:47 crc kubenswrapper[4852]: I1201 20:25:46.998699 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.998671443 podStartE2EDuration="2.998671443s" podCreationTimestamp="2025-12-01 20:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:25:46.979651259 +0000 UTC m=+1266.906732746" watchObservedRunningTime="2025-12-01 20:25:46.998671443 +0000 UTC m=+1266.925752890"
Dec 01 20:25:47 crc kubenswrapper[4852]: I1201 20:25:47.008498 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.008478453 podStartE2EDuration="3.008478453s" podCreationTimestamp="2025-12-01 20:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:25:46.999674244 +0000 UTC m=+1266.926755701" watchObservedRunningTime="2025-12-01 20:25:47.008478453 +0000 UTC m=+1266.935559860"
Dec 01 20:25:50 crc kubenswrapper[4852]: I1201 20:25:50.447522 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 01 20:25:50 crc kubenswrapper[4852]: I1201 20:25:50.455924 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 01 20:25:50 crc kubenswrapper[4852]: I1201 20:25:50.456044 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 01 20:25:51 crc kubenswrapper[4852]: I1201 20:25:51.258898 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 01 20:25:51 crc kubenswrapper[4852]: I1201 20:25:51.258958 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 01 20:25:52 crc kubenswrapper[4852]: I1201 20:25:52.274646 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="623fc41b-9221-407d-a5d7-e59ce151725a" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 20:25:52 crc kubenswrapper[4852]: I1201 20:25:52.274766 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="623fc41b-9221-407d-a5d7-e59ce151725a" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 20:25:52 crc kubenswrapper[4852]: E1201 20:25:52.440613 4852 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff27233_f4e2_46f3_9f02_a9cf7cd3e674.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 20:25:55 crc kubenswrapper[4852]: I1201 20:25:55.448492 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 01 20:25:55 crc kubenswrapper[4852]: I1201 20:25:55.456568 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 01 20:25:55 crc kubenswrapper[4852]: I1201 20:25:55.456655 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 01 20:25:55 crc kubenswrapper[4852]: I1201 20:25:55.488415 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 01 20:25:56 crc kubenswrapper[4852]: I1201 20:25:56.107913 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 01 20:25:56 crc kubenswrapper[4852]: I1201 20:25:56.472883 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b254248d-f4d2-454d-bc92-09e0d709a0b8" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 20:25:56 crc kubenswrapper[4852]: I1201 20:25:56.472875 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b254248d-f4d2-454d-bc92-09e0d709a0b8" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 20:25:56 crc kubenswrapper[4852]: I1201 20:25:56.989359 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 01 20:26:01 crc kubenswrapper[4852]: I1201 20:26:01.267808 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 01 20:26:01 crc kubenswrapper[4852]: I1201 20:26:01.268578 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 01 20:26:01 crc kubenswrapper[4852]: I1201 20:26:01.269028 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 01 20:26:01 crc kubenswrapper[4852]: I1201 20:26:01.269278 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 01 20:26:01 crc kubenswrapper[4852]: I1201 20:26:01.275449 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 01 20:26:01 crc kubenswrapper[4852]: I1201 20:26:01.281147 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 01 20:26:02 crc kubenswrapper[4852]: E1201 20:26:02.702480 4852 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff27233_f4e2_46f3_9f02_a9cf7cd3e674.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 20:26:05 crc kubenswrapper[4852]: I1201 20:26:05.464226 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 01 20:26:05 crc kubenswrapper[4852]: I1201 20:26:05.468195 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 01 20:26:05 crc kubenswrapper[4852]: I1201 20:26:05.479724 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 01 20:26:06 crc kubenswrapper[4852]: I1201 20:26:06.191157 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 01 20:26:12 crc kubenswrapper[4852]: E1201 20:26:12.986218 4852 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff27233_f4e2_46f3_9f02_a9cf7cd3e674.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 20:26:14 crc kubenswrapper[4852]: I1201 20:26:14.550774 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 20:26:16 crc kubenswrapper[4852]: I1201 20:26:16.277106 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 20:26:19 crc kubenswrapper[4852]: I1201 20:26:19.983374 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="ff0aa0ab-3c85-4b10-a2c4-a680086db344" containerName="rabbitmq" containerID="cri-o://93c507c659599d436d50bcc26bd42033cddc5958ea2eddf7768e09be98c42fb9" gracePeriod=604795
Dec 01 20:26:20 crc kubenswrapper[4852]: I1201 20:26:20.230175 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 20:26:20 crc kubenswrapper[4852]: I1201 20:26:20.230241 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 20:26:21 crc kubenswrapper[4852]: I1201 20:26:21.354163 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" containerName="rabbitmq" containerID="cri-o://774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb" gracePeriod=604795
Dec 01 20:26:23 crc kubenswrapper[4852]: E1201 20:26:23.286644 4852 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff27233_f4e2_46f3_9f02_a9cf7cd3e674.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.255818 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused"
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.461358 4852 generic.go:334] "Generic (PLEG): container finished" podID="ff0aa0ab-3c85-4b10-a2c4-a680086db344" containerID="93c507c659599d436d50bcc26bd42033cddc5958ea2eddf7768e09be98c42fb9" exitCode=0
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.461534 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff0aa0ab-3c85-4b10-a2c4-a680086db344","Type":"ContainerDied","Data":"93c507c659599d436d50bcc26bd42033cddc5958ea2eddf7768e09be98c42fb9"}
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.706881 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.846897 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-plugins-conf\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.846957 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.846994 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-plugins\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.847116 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-confd\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.847168 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-config-data\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.847216 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-tls\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.847249 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-server-conf\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.847318 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm5ff\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-kube-api-access-dm5ff\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.847373 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-erlang-cookie\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.847551 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff0aa0ab-3c85-4b10-a2c4-a680086db344-pod-info\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.847616 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff0aa0ab-3c85-4b10-a2c4-a680086db344-erlang-cookie-secret\") pod \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\" (UID: \"ff0aa0ab-3c85-4b10-a2c4-a680086db344\") "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.847729 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.848165 4852 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.848517 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.848875 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.855817 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff0aa0ab-3c85-4b10-a2c4-a680086db344-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.855843 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.856396 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.857148 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-kube-api-access-dm5ff" (OuterVolumeSpecName: "kube-api-access-dm5ff") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "kube-api-access-dm5ff". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.857926 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/ff0aa0ab-3c85-4b10-a2c4-a680086db344-pod-info" (OuterVolumeSpecName: "pod-info") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.879951 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-config-data" (OuterVolumeSpecName: "config-data") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.921393 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-server-conf" (OuterVolumeSpecName: "server-conf") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.953361 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.953412 4852 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.953424 4852 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff0aa0ab-3c85-4b10-a2c4-a680086db344-server-conf\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.953435 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm5ff\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-kube-api-access-dm5ff\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.953464 4852 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.953477 4852 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff0aa0ab-3c85-4b10-a2c4-a680086db344-pod-info\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.953490 4852 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff0aa0ab-3c85-4b10-a2c4-a680086db344-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.953524 4852 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" "
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.953534 4852 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.982306 4852 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc"
Dec 01 20:26:26 crc kubenswrapper[4852]: I1201 20:26:26.988595 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "ff0aa0ab-3c85-4b10-a2c4-a680086db344" (UID: "ff0aa0ab-3c85-4b10-a2c4-a680086db344"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.055713 4852 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.055761 4852 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff0aa0ab-3c85-4b10-a2c4-a680086db344-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.473758 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff0aa0ab-3c85-4b10-a2c4-a680086db344","Type":"ContainerDied","Data":"b38f38ef4ea0bcea6c9f8a61212ccaafcface71f3b6220fd509568b0b97e6745"}
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.473824 4852 scope.go:117] "RemoveContainer" containerID="93c507c659599d436d50bcc26bd42033cddc5958ea2eddf7768e09be98c42fb9"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.473919 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.511607 4852 scope.go:117] "RemoveContainer" containerID="ee7c14abc615f6557fff5cc024b7db4e9b29feed38c8bee4c32bce0f856b83fd"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.525120 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.533028 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.580750 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 20:26:27 crc kubenswrapper[4852]: E1201 20:26:27.581379 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff0aa0ab-3c85-4b10-a2c4-a680086db344" containerName="rabbitmq"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.585515 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff0aa0ab-3c85-4b10-a2c4-a680086db344" containerName="rabbitmq"
Dec 01 20:26:27 crc kubenswrapper[4852]: E1201 20:26:27.585594 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff0aa0ab-3c85-4b10-a2c4-a680086db344" containerName="setup-container"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.585609 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff0aa0ab-3c85-4b10-a2c4-a680086db344" containerName="setup-container"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.586071 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff0aa0ab-3c85-4b10-a2c4-a680086db344" containerName="rabbitmq"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.587474 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.592085 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.592376 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.592659 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.605373 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.618843 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.618989 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.619239 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jd5km"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.619560 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.776973 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.777043 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.777086 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9b1036e7-b15b-4b19-bac9-4ce322698550-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.777116 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9b1036e7-b15b-4b19-bac9-4ce322698550-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.777180 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.777271 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.777547 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9b1036e7-b15b-4b19-bac9-4ce322698550-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.777690 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-627tq\" (UniqueName: \"kubernetes.io/projected/9b1036e7-b15b-4b19-bac9-4ce322698550-kube-api-access-627tq\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.777879 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9b1036e7-b15b-4b19-bac9-4ce322698550-config-data\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.777919 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.778052 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9b1036e7-b15b-4b19-bac9-4ce322698550-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.880438 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9b1036e7-b15b-4b19-bac9-4ce322698550-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.880913 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-627tq\" (UniqueName: \"kubernetes.io/projected/9b1036e7-b15b-4b19-bac9-4ce322698550-kube-api-access-627tq\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.880959 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9b1036e7-b15b-4b19-bac9-4ce322698550-config-data\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.880979 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.881015 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9b1036e7-b15b-4b19-bac9-4ce322698550-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.881073 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.881098 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.881118 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9b1036e7-b15b-4b19-bac9-4ce322698550-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.881136 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9b1036e7-b15b-4b19-bac9-4ce322698550-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.881158 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.881175 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.882276 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.882643 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9b1036e7-b15b-4b19-bac9-4ce322698550-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.882862 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.884656 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.886968 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9b1036e7-b15b-4b19-bac9-4ce322698550-config-data\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.888283 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.888659 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9b1036e7-b15b-4b19-bac9-4ce322698550-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.890636 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9b1036e7-b15b-4b19-bac9-4ce322698550-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.894958 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9b1036e7-b15b-4b19-bac9-4ce322698550-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.896331 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9b1036e7-b15b-4b19-bac9-4ce322698550-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.901307 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-627tq\" (UniqueName: \"kubernetes.io/projected/9b1036e7-b15b-4b19-bac9-4ce322698550-kube-api-access-627tq\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.918502 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"9b1036e7-b15b-4b19-bac9-4ce322698550\") " pod="openstack/rabbitmq-server-0"
Dec 01 20:26:27 crc kubenswrapper[4852]: I1201 20:26:27.998512 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.010292 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.191915 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-erlang-cookie\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.192578 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-erlang-cookie-secret\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.192640 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-plugins-conf\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.192687 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwqsc\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-kube-api-access-bwqsc\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.192750 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-pod-info\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.192794 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-server-conf\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.192859 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-plugins\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.192893 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.192917 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-tls\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.192949 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-confd\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.192982 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-config-data\") pod \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\" (UID: \"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca\") "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.193244 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.194381 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.194839 4852 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.194862 4852 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.195643 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.201351 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-pod-info" (OuterVolumeSpecName: "pod-info") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.202389 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.201410 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.204213 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-kube-api-access-bwqsc" (OuterVolumeSpecName: "kube-api-access-bwqsc") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "kube-api-access-bwqsc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.223746 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.262252 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-config-data" (OuterVolumeSpecName: "config-data") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.276784 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-server-conf" (OuterVolumeSpecName: "server-conf") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.298526 4852 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.298553 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwqsc\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-kube-api-access-bwqsc\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.298562 4852 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-pod-info\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.298570 4852 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-server-conf\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.298578 4852 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.298606 4852 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.298617 4852 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.298626 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.324220 4852 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.333127 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff0aa0ab-3c85-4b10-a2c4-a680086db344" path="/var/lib/kubelet/pods/ff0aa0ab-3c85-4b10-a2c4-a680086db344/volumes"
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.370654 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" (UID: "db692ccd-f5d2-463b-9d7d-1ecccd56d4ca"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.401350 4852 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.401480 4852 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.494292 4852 generic.go:334] "Generic (PLEG): container finished" podID="db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" containerID="774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb" exitCode=0
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.494366 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.494372 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca","Type":"ContainerDied","Data":"774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb"}
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.496176 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"db692ccd-f5d2-463b-9d7d-1ecccd56d4ca","Type":"ContainerDied","Data":"0a5b1258fd18e8b0557aad2704a8c42b7b55d99eeee5a088173228f858651bac"}
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.496201 4852 scope.go:117] "RemoveContainer" containerID="774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb"
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.546251 4852 scope.go:117] "RemoveContainer" containerID="91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5"
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.550055 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.561245 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.578127 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.596743 4852 scope.go:117] "RemoveContainer" containerID="774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb"
Dec 01 20:26:28 crc kubenswrapper[4852]: E1201 20:26:28.597354 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb\": container with ID starting with 774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb not found: ID does not exist" containerID="774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb"
Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.597397 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb"} err="failed to get container status \"774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb\": rpc error: code = NotFound desc = could not find container
\"774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb\": container with ID starting with 774f50a924c7f9fdf57fa7dbff920ca94d170c03e2a5ad819163def7af5bbcbb not found: ID does not exist" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.597427 4852 scope.go:117] "RemoveContainer" containerID="91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5" Dec 01 20:26:28 crc kubenswrapper[4852]: E1201 20:26:28.598051 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5\": container with ID starting with 91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5 not found: ID does not exist" containerID="91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.598105 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5"} err="failed to get container status \"91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5\": rpc error: code = NotFound desc = could not find container \"91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5\": container with ID starting with 91d8405a5709f3689b104e42cf470adbe40dd183fffbbae8254b8936173703b5 not found: ID does not exist" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.598275 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 20:26:28 crc kubenswrapper[4852]: E1201 20:26:28.598959 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" containerName="rabbitmq" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.598985 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" containerName="rabbitmq" Dec 01 20:26:28 crc kubenswrapper[4852]: E1201 20:26:28.599035 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" containerName="setup-container" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.599043 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" containerName="setup-container" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.599316 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" containerName="rabbitmq" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.600702 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.606428 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.606796 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.607015 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.608315 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-shsg6" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.608566 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.608589 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.608892 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.613225 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710106 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5eb0a95a-7ba8-48aa-80bc-245c195063b0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710177 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5eb0a95a-7ba8-48aa-80bc-245c195063b0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710215 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5eb0a95a-7ba8-48aa-80bc-245c195063b0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710263 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710330 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqdsx\" (UniqueName: \"kubernetes.io/projected/5eb0a95a-7ba8-48aa-80bc-245c195063b0-kube-api-access-zqdsx\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710358 4852 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710393 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710493 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710512 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710535 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5eb0a95a-7ba8-48aa-80bc-245c195063b0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.710553 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5eb0a95a-7ba8-48aa-80bc-245c195063b0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.813205 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.813371 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.813426 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.813473 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/configmap/5eb0a95a-7ba8-48aa-80bc-245c195063b0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.813533 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5eb0a95a-7ba8-48aa-80bc-245c195063b0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.813890 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5eb0a95a-7ba8-48aa-80bc-245c195063b0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.813891 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.813927 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5eb0a95a-7ba8-48aa-80bc-245c195063b0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.813974 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5eb0a95a-7ba8-48aa-80bc-245c195063b0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.814734 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.815226 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5eb0a95a-7ba8-48aa-80bc-245c195063b0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.815081 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5eb0a95a-7ba8-48aa-80bc-245c195063b0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.815284 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.815402 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqdsx\" (UniqueName: \"kubernetes.io/projected/5eb0a95a-7ba8-48aa-80bc-245c195063b0-kube-api-access-zqdsx\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.815565 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5eb0a95a-7ba8-48aa-80bc-245c195063b0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.815700 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.815438 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.819134 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5eb0a95a-7ba8-48aa-80bc-245c195063b0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.820292 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.821245 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5eb0a95a-7ba8-48aa-80bc-245c195063b0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.821299 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5eb0a95a-7ba8-48aa-80bc-245c195063b0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.837884 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqdsx\" (UniqueName: \"kubernetes.io/projected/5eb0a95a-7ba8-48aa-80bc-245c195063b0-kube-api-access-zqdsx\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.851034 4852 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5eb0a95a-7ba8-48aa-80bc-245c195063b0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:28 crc kubenswrapper[4852]: I1201 20:26:28.944256 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:26:29 crc kubenswrapper[4852]: I1201 20:26:29.439810 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 20:26:29 crc kubenswrapper[4852]: W1201 20:26:29.449185 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5eb0a95a_7ba8_48aa_80bc_245c195063b0.slice/crio-2c8a67cf0f62866337ad94e7d04b30c9acdfb794681290b333d453daa72c03e8 WatchSource:0}: Error finding container 2c8a67cf0f62866337ad94e7d04b30c9acdfb794681290b333d453daa72c03e8: Status 404 returned error can't find the container with id 2c8a67cf0f62866337ad94e7d04b30c9acdfb794681290b333d453daa72c03e8 Dec 01 20:26:29 crc kubenswrapper[4852]: I1201 20:26:29.566248 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9b1036e7-b15b-4b19-bac9-4ce322698550","Type":"ContainerStarted","Data":"d9beab27c8adcedc8ba19e565997be2ff9ec35421d21a8dfad966bb462bd7447"} Dec 01 20:26:29 crc kubenswrapper[4852]: I1201 20:26:29.570780 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5eb0a95a-7ba8-48aa-80bc-245c195063b0","Type":"ContainerStarted","Data":"2c8a67cf0f62866337ad94e7d04b30c9acdfb794681290b333d453daa72c03e8"} Dec 01 20:26:29 crc kubenswrapper[4852]: I1201 20:26:29.922400 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68cc78dbf9-d9577"] Dec 01 20:26:29 crc kubenswrapper[4852]: I1201 20:26:29.924125 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:29 crc kubenswrapper[4852]: I1201 20:26:29.927932 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 01 20:26:29 crc kubenswrapper[4852]: I1201 20:26:29.944058 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68cc78dbf9-d9577"] Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.052762 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-openstack-edpm-ipam\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.052819 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-svc\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.052886 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-nb\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.053032 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-config\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.053091 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-swift-storage-0\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.053137 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfvpg\" (UniqueName: \"kubernetes.io/projected/ab01e03f-aa39-4472-8e85-78b8b34a21e9-kube-api-access-lfvpg\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.053319 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-sb\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.155538 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-sb\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" 
(UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.155637 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-openstack-edpm-ipam\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.155659 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-svc\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.155700 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-nb\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.155788 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-config\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.155807 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-swift-storage-0\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.155830 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfvpg\" (UniqueName: \"kubernetes.io/projected/ab01e03f-aa39-4472-8e85-78b8b34a21e9-kube-api-access-lfvpg\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.156964 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-openstack-edpm-ipam\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.157035 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-sb\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.157058 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-svc\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " 
pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.157094 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-nb\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.157183 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-config\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.157239 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-swift-storage-0\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.183830 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfvpg\" (UniqueName: \"kubernetes.io/projected/ab01e03f-aa39-4472-8e85-78b8b34a21e9-kube-api-access-lfvpg\") pod \"dnsmasq-dns-68cc78dbf9-d9577\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.245718 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.339713 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db692ccd-f5d2-463b-9d7d-1ecccd56d4ca" path="/var/lib/kubelet/pods/db692ccd-f5d2-463b-9d7d-1ecccd56d4ca/volumes" Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.589716 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9b1036e7-b15b-4b19-bac9-4ce322698550","Type":"ContainerStarted","Data":"575cfa35cd2a726241d9f638e73dbf46d9aa933562ca74b2228087c1d654c5cf"} Dec 01 20:26:30 crc kubenswrapper[4852]: I1201 20:26:30.796075 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68cc78dbf9-d9577"] Dec 01 20:26:30 crc kubenswrapper[4852]: W1201 20:26:30.888367 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab01e03f_aa39_4472_8e85_78b8b34a21e9.slice/crio-40d9792ea8bd473ba28a75239e74b4c1bc2bcd5da0b83733cad0317abbad0318 WatchSource:0}: Error finding container 40d9792ea8bd473ba28a75239e74b4c1bc2bcd5da0b83733cad0317abbad0318: Status 404 returned error can't find the container with id 40d9792ea8bd473ba28a75239e74b4c1bc2bcd5da0b83733cad0317abbad0318 Dec 01 20:26:31 crc kubenswrapper[4852]: I1201 20:26:31.588862 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="ff0aa0ab-3c85-4b10-a2c4-a680086db344" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: i/o timeout" Dec 01 20:26:31 crc kubenswrapper[4852]: I1201 20:26:31.601746 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"5eb0a95a-7ba8-48aa-80bc-245c195063b0","Type":"ContainerStarted","Data":"9751f5d75663b91498f3181649a8555f0c55670e3f807a4eb2349accfd4d1da5"} Dec 01 20:26:31 crc kubenswrapper[4852]: I1201 20:26:31.603478 4852 generic.go:334] "Generic (PLEG): container finished" podID="ab01e03f-aa39-4472-8e85-78b8b34a21e9" containerID="4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80" exitCode=0 Dec 01 20:26:31 crc kubenswrapper[4852]: I1201 20:26:31.603590 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" event={"ID":"ab01e03f-aa39-4472-8e85-78b8b34a21e9","Type":"ContainerDied","Data":"4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80"} Dec 01 20:26:31 crc kubenswrapper[4852]: I1201 20:26:31.603648 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" event={"ID":"ab01e03f-aa39-4472-8e85-78b8b34a21e9","Type":"ContainerStarted","Data":"40d9792ea8bd473ba28a75239e74b4c1bc2bcd5da0b83733cad0317abbad0318"} Dec 01 20:26:32 crc kubenswrapper[4852]: I1201 20:26:32.620872 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" event={"ID":"ab01e03f-aa39-4472-8e85-78b8b34a21e9","Type":"ContainerStarted","Data":"37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1"} Dec 01 20:26:32 crc kubenswrapper[4852]: I1201 20:26:32.621233 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:32 crc kubenswrapper[4852]: I1201 20:26:32.655653 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" podStartSLOduration=3.655634273 podStartE2EDuration="3.655634273s" podCreationTimestamp="2025-12-01 20:26:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:26:32.652280196 +0000 UTC m=+1312.579361613" watchObservedRunningTime="2025-12-01 20:26:32.655634273 +0000 UTC m=+1312.582715690" Dec 01 20:26:33 crc kubenswrapper[4852]: E1201 20:26:33.587031 4852 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff27233_f4e2_46f3_9f02_a9cf7cd3e674.slice\": RecentStats: unable to find data in memory cache]" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.247653 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.357882 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc646c8f9-bbmgk"] Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.577485 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59dddb89-qnwhw"] Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.585943 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.605630 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59dddb89-qnwhw"] Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.682643 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-dns-swift-storage-0\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.682758 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-dns-svc\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.682845 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-ovsdbserver-sb\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.682885 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-config\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.682921 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-ovsdbserver-nb\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.682958 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-openstack-edpm-ipam\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.683011 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d96mn\" (UniqueName: \"kubernetes.io/projected/e04bb162-f05c-4844-b368-70764dce284d-kube-api-access-d96mn\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.743150 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" podUID="29aaf433-d1a1-4404-9cf7-af6d6982f0fa" containerName="dnsmasq-dns" containerID="cri-o://2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf" gracePeriod=10 Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.784710 4852 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-dns-svc\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.784831 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-ovsdbserver-sb\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.784861 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-config\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.785693 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-dns-svc\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.785661 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-ovsdbserver-sb\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.785731 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-ovsdbserver-nb\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.785804 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-openstack-edpm-ipam\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.785839 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d96mn\" (UniqueName: \"kubernetes.io/projected/e04bb162-f05c-4844-b368-70764dce284d-kube-api-access-d96mn\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.785941 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-dns-swift-storage-0\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.786273 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-config\") pod 
\"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.786600 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-dns-swift-storage-0\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.786884 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-openstack-edpm-ipam\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.792213 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e04bb162-f05c-4844-b368-70764dce284d-ovsdbserver-nb\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.821115 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d96mn\" (UniqueName: \"kubernetes.io/projected/e04bb162-f05c-4844-b368-70764dce284d-kube-api-access-d96mn\") pod \"dnsmasq-dns-59dddb89-qnwhw\" (UID: \"e04bb162-f05c-4844-b368-70764dce284d\") " pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:40 crc kubenswrapper[4852]: I1201 20:26:40.914608 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.326821 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.499849 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-config\") pod \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.500114 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-sb\") pod \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.500226 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-swift-storage-0\") pod \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.500402 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-nb\") pod \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.501128 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rscsr\" (UniqueName: \"kubernetes.io/projected/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-kube-api-access-rscsr\") pod \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.501223 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-svc\") pod \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\" (UID: \"29aaf433-d1a1-4404-9cf7-af6d6982f0fa\") " Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.502319 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59dddb89-qnwhw"] Dec 01 20:26:41 crc kubenswrapper[4852]: W1201 20:26:41.505220 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode04bb162_f05c_4844_b368_70764dce284d.slice/crio-10f2f794c2f28f761d46b8e1ceb8b8a2c8b7b4b22af864ea21b694c63c17848a WatchSource:0}: Error finding container 10f2f794c2f28f761d46b8e1ceb8b8a2c8b7b4b22af864ea21b694c63c17848a: Status 404 returned error can't find the container with id 10f2f794c2f28f761d46b8e1ceb8b8a2c8b7b4b22af864ea21b694c63c17848a Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.507082 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-kube-api-access-rscsr" (OuterVolumeSpecName: "kube-api-access-rscsr") pod "29aaf433-d1a1-4404-9cf7-af6d6982f0fa" (UID: "29aaf433-d1a1-4404-9cf7-af6d6982f0fa"). InnerVolumeSpecName "kube-api-access-rscsr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.563602 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "29aaf433-d1a1-4404-9cf7-af6d6982f0fa" (UID: "29aaf433-d1a1-4404-9cf7-af6d6982f0fa"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.563631 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "29aaf433-d1a1-4404-9cf7-af6d6982f0fa" (UID: "29aaf433-d1a1-4404-9cf7-af6d6982f0fa"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.565161 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "29aaf433-d1a1-4404-9cf7-af6d6982f0fa" (UID: "29aaf433-d1a1-4404-9cf7-af6d6982f0fa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.582669 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-config" (OuterVolumeSpecName: "config") pod "29aaf433-d1a1-4404-9cf7-af6d6982f0fa" (UID: "29aaf433-d1a1-4404-9cf7-af6d6982f0fa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.586106 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "29aaf433-d1a1-4404-9cf7-af6d6982f0fa" (UID: "29aaf433-d1a1-4404-9cf7-af6d6982f0fa"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.605578 4852 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.605635 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.605646 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rscsr\" (UniqueName: \"kubernetes.io/projected/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-kube-api-access-rscsr\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.605658 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.605668 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.605682 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29aaf433-d1a1-4404-9cf7-af6d6982f0fa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.753950 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59dddb89-qnwhw" event={"ID":"e04bb162-f05c-4844-b368-70764dce284d","Type":"ContainerStarted","Data":"870a3fae6a4bbcee2869d1b1a16f8edfa8ae740e84012842342fe548ff94cfb6"} Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.754374 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59dddb89-qnwhw" event={"ID":"e04bb162-f05c-4844-b368-70764dce284d","Type":"ContainerStarted","Data":"10f2f794c2f28f761d46b8e1ceb8b8a2c8b7b4b22af864ea21b694c63c17848a"} Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.757387 4852 generic.go:334] "Generic (PLEG): container finished" podID="29aaf433-d1a1-4404-9cf7-af6d6982f0fa" containerID="2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf" exitCode=0 Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.757421 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" event={"ID":"29aaf433-d1a1-4404-9cf7-af6d6982f0fa","Type":"ContainerDied","Data":"2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf"} Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.757473 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.757493 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" event={"ID":"29aaf433-d1a1-4404-9cf7-af6d6982f0fa","Type":"ContainerDied","Data":"c9cbd7072788054eba22c1a2eabf5b9ed889c0f95fff85bd001c8c1350522e11"} Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.757538 4852 scope.go:117] "RemoveContainer" containerID="2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.780565 4852 scope.go:117] "RemoveContainer" containerID="fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.860707 4852 scope.go:117] "RemoveContainer" containerID="2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf" Dec 01 20:26:41 crc kubenswrapper[4852]: E1201 20:26:41.861152 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf\": container with ID starting with 2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf not found: ID does not exist" containerID="2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.861196 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf"} err="failed to get container status \"2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf\": rpc error: code = NotFound desc = could not find container \"2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf\": container with ID starting with 2b18536d6c9b434c447cdbd4c1beb9e75fb9ea1fb3dfa6593850c4fa1ef430cf not found: ID does not exist" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.861221 4852 scope.go:117] "RemoveContainer" containerID="fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47" Dec 01 20:26:41 crc kubenswrapper[4852]: E1201 20:26:41.861493 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47\": container with ID starting with fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47 not found: ID does not exist" containerID="fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.861523 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47"} err="failed to get container status \"fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47\": rpc error: code = NotFound desc = could not find container \"fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47\": container with ID starting with fcd3d82e322e9d51f12b0094bf7001796a368c2774d51df73accc7f8030a1b47 not found: ID does not exist" Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.868899 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc646c8f9-bbmgk"] Dec 01 20:26:41 crc kubenswrapper[4852]: I1201 20:26:41.876886 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc646c8f9-bbmgk"] Dec 01 
20:26:42 crc kubenswrapper[4852]: I1201 20:26:42.340970 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29aaf433-d1a1-4404-9cf7-af6d6982f0fa" path="/var/lib/kubelet/pods/29aaf433-d1a1-4404-9cf7-af6d6982f0fa/volumes" Dec 01 20:26:42 crc kubenswrapper[4852]: I1201 20:26:42.772837 4852 generic.go:334] "Generic (PLEG): container finished" podID="e04bb162-f05c-4844-b368-70764dce284d" containerID="870a3fae6a4bbcee2869d1b1a16f8edfa8ae740e84012842342fe548ff94cfb6" exitCode=0 Dec 01 20:26:42 crc kubenswrapper[4852]: I1201 20:26:42.772882 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59dddb89-qnwhw" event={"ID":"e04bb162-f05c-4844-b368-70764dce284d","Type":"ContainerDied","Data":"870a3fae6a4bbcee2869d1b1a16f8edfa8ae740e84012842342fe548ff94cfb6"} Dec 01 20:26:43 crc kubenswrapper[4852]: I1201 20:26:43.788969 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59dddb89-qnwhw" event={"ID":"e04bb162-f05c-4844-b368-70764dce284d","Type":"ContainerStarted","Data":"c1f14995e4a552d632e9b3f30bc0528e845707225e48d7c29f549a935de4adb9"} Dec 01 20:26:43 crc kubenswrapper[4852]: I1201 20:26:43.790808 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:43 crc kubenswrapper[4852]: I1201 20:26:43.845464 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59dddb89-qnwhw" podStartSLOduration=3.8454219739999997 podStartE2EDuration="3.845421974s" podCreationTimestamp="2025-12-01 20:26:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:26:43.835595892 +0000 UTC m=+1323.762677319" watchObservedRunningTime="2025-12-01 20:26:43.845421974 +0000 UTC m=+1323.772503391" Dec 01 20:26:46 crc kubenswrapper[4852]: I1201 20:26:46.205004 4852 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6bc646c8f9-bbmgk" podUID="29aaf433-d1a1-4404-9cf7-af6d6982f0fa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.195:5353: i/o timeout" Dec 01 20:26:50 crc kubenswrapper[4852]: I1201 20:26:50.229975 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:26:50 crc kubenswrapper[4852]: I1201 20:26:50.230756 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:26:50 crc kubenswrapper[4852]: I1201 20:26:50.917717 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59dddb89-qnwhw" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.020080 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68cc78dbf9-d9577"] Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.020444 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" podUID="ab01e03f-aa39-4472-8e85-78b8b34a21e9" containerName="dnsmasq-dns" 
containerID="cri-o://37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1" gracePeriod=10 Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.488912 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.634758 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-sb\") pod \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.634846 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-config\") pod \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.634903 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-openstack-edpm-ipam\") pod \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.634949 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfvpg\" (UniqueName: \"kubernetes.io/projected/ab01e03f-aa39-4472-8e85-78b8b34a21e9-kube-api-access-lfvpg\") pod \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.635194 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-svc\") pod \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.635236 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-nb\") pod \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.635264 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-swift-storage-0\") pod \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\" (UID: \"ab01e03f-aa39-4472-8e85-78b8b34a21e9\") " Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.673230 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab01e03f-aa39-4472-8e85-78b8b34a21e9-kube-api-access-lfvpg" (OuterVolumeSpecName: "kube-api-access-lfvpg") pod "ab01e03f-aa39-4472-8e85-78b8b34a21e9" (UID: "ab01e03f-aa39-4472-8e85-78b8b34a21e9"). InnerVolumeSpecName "kube-api-access-lfvpg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.741954 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfvpg\" (UniqueName: \"kubernetes.io/projected/ab01e03f-aa39-4472-8e85-78b8b34a21e9-kube-api-access-lfvpg\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.817516 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-config" (OuterVolumeSpecName: "config") pod "ab01e03f-aa39-4472-8e85-78b8b34a21e9" (UID: "ab01e03f-aa39-4472-8e85-78b8b34a21e9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.835715 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ab01e03f-aa39-4472-8e85-78b8b34a21e9" (UID: "ab01e03f-aa39-4472-8e85-78b8b34a21e9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.835995 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ab01e03f-aa39-4472-8e85-78b8b34a21e9" (UID: "ab01e03f-aa39-4472-8e85-78b8b34a21e9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.844666 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ab01e03f-aa39-4472-8e85-78b8b34a21e9" (UID: "ab01e03f-aa39-4472-8e85-78b8b34a21e9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.844721 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ab01e03f-aa39-4472-8e85-78b8b34a21e9" (UID: "ab01e03f-aa39-4472-8e85-78b8b34a21e9"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.844804 4852 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.844836 4852 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.844847 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.849948 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "ab01e03f-aa39-4472-8e85-78b8b34a21e9" (UID: "ab01e03f-aa39-4472-8e85-78b8b34a21e9"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.879972 4852 generic.go:334] "Generic (PLEG): container finished" podID="ab01e03f-aa39-4472-8e85-78b8b34a21e9" containerID="37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1" exitCode=0 Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.880028 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" event={"ID":"ab01e03f-aa39-4472-8e85-78b8b34a21e9","Type":"ContainerDied","Data":"37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1"} Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.880060 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" event={"ID":"ab01e03f-aa39-4472-8e85-78b8b34a21e9","Type":"ContainerDied","Data":"40d9792ea8bd473ba28a75239e74b4c1bc2bcd5da0b83733cad0317abbad0318"} Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.880081 4852 scope.go:117] "RemoveContainer" containerID="37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.880250 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68cc78dbf9-d9577" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.910814 4852 scope.go:117] "RemoveContainer" containerID="4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.925006 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68cc78dbf9-d9577"] Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.936086 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68cc78dbf9-d9577"] Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.945498 4852 scope.go:117] "RemoveContainer" containerID="37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1" Dec 01 20:26:51 crc kubenswrapper[4852]: E1201 20:26:51.946083 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1\": container with ID starting with 37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1 not found: ID does not exist" containerID="37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.946146 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1"} err="failed to get container status \"37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1\": rpc error: code = NotFound desc = could not find container \"37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1\": container with ID starting with 37a0833b2f3b5c63d03ab8f12d3831dd93c1067e5ea8f65c835598ff11ec2dd1 not found: ID does not exist" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.946175 4852 scope.go:117] "RemoveContainer" containerID="4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80" Dec 01 20:26:51 crc kubenswrapper[4852]: E1201 20:26:51.946722 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80\": container with ID starting with 4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80 not found: ID does not exist" containerID="4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.946770 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80"} err="failed to get container status \"4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80\": rpc error: code = NotFound desc = could not find container \"4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80\": container with ID starting with 4e24f05bb7c020fb673f0fb5083f217f2d181ec5733a9b1f3d9df85073c96a80 not found: ID does not exist" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.946804 4852 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.946828 4852 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:51 crc kubenswrapper[4852]: I1201 20:26:51.946841 4852 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ab01e03f-aa39-4472-8e85-78b8b34a21e9-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 01 20:26:52 crc kubenswrapper[4852]: I1201 20:26:52.341523 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab01e03f-aa39-4472-8e85-78b8b34a21e9" path="/var/lib/kubelet/pods/ab01e03f-aa39-4472-8e85-78b8b34a21e9/volumes" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.022548 4852 generic.go:334] "Generic (PLEG): container finished" podID="5eb0a95a-7ba8-48aa-80bc-245c195063b0" containerID="9751f5d75663b91498f3181649a8555f0c55670e3f807a4eb2349accfd4d1da5" exitCode=0 Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.022622 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5eb0a95a-7ba8-48aa-80bc-245c195063b0","Type":"ContainerDied","Data":"9751f5d75663b91498f3181649a8555f0c55670e3f807a4eb2349accfd4d1da5"} Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.025519 4852 generic.go:334] "Generic (PLEG): container finished" podID="9b1036e7-b15b-4b19-bac9-4ce322698550" containerID="575cfa35cd2a726241d9f638e73dbf46d9aa933562ca74b2228087c1d654c5cf" exitCode=0 Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.025594 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9b1036e7-b15b-4b19-bac9-4ce322698550","Type":"ContainerDied","Data":"575cfa35cd2a726241d9f638e73dbf46d9aa933562ca74b2228087c1d654c5cf"} Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.199693 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6"] Dec 01 20:27:04 crc kubenswrapper[4852]: E1201 20:27:04.200583 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab01e03f-aa39-4472-8e85-78b8b34a21e9" containerName="dnsmasq-dns" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.200606 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab01e03f-aa39-4472-8e85-78b8b34a21e9" containerName="dnsmasq-dns" Dec 01 20:27:04 crc kubenswrapper[4852]: E1201 20:27:04.200621 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab01e03f-aa39-4472-8e85-78b8b34a21e9" containerName="init" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.200630 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab01e03f-aa39-4472-8e85-78b8b34a21e9" containerName="init" Dec 01 20:27:04 crc kubenswrapper[4852]: E1201 20:27:04.200676 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29aaf433-d1a1-4404-9cf7-af6d6982f0fa" containerName="init" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.200687 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="29aaf433-d1a1-4404-9cf7-af6d6982f0fa" containerName="init" Dec 01 20:27:04 crc kubenswrapper[4852]: E1201 20:27:04.200711 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29aaf433-d1a1-4404-9cf7-af6d6982f0fa" containerName="dnsmasq-dns" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.200719 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="29aaf433-d1a1-4404-9cf7-af6d6982f0fa" containerName="dnsmasq-dns" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.200963 4852 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ab01e03f-aa39-4472-8e85-78b8b34a21e9" containerName="dnsmasq-dns" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.200994 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="29aaf433-d1a1-4404-9cf7-af6d6982f0fa" containerName="dnsmasq-dns" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.202019 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.212630 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.212929 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.213083 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.213286 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.214373 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6"] Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.321776 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.321908 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.321993 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.322019 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhnp4\" (UniqueName: \"kubernetes.io/projected/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-kube-api-access-hhnp4\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.423693 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.423742 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhnp4\" (UniqueName: \"kubernetes.io/projected/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-kube-api-access-hhnp4\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.423781 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.423867 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.428478 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.428754 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.431312 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.444683 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhnp4\" (UniqueName: \"kubernetes.io/projected/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-kube-api-access-hhnp4\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:04 crc kubenswrapper[4852]: I1201 20:27:04.606328 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:05 crc kubenswrapper[4852]: I1201 20:27:05.043305 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9b1036e7-b15b-4b19-bac9-4ce322698550","Type":"ContainerStarted","Data":"6a449f871c327f36733196c4c76222ec20e718ced63c78eded834357f8bff06a"} Dec 01 20:27:05 crc kubenswrapper[4852]: I1201 20:27:05.045241 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 01 20:27:05 crc kubenswrapper[4852]: I1201 20:27:05.048911 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5eb0a95a-7ba8-48aa-80bc-245c195063b0","Type":"ContainerStarted","Data":"c311e6660d2cde844f902aabbb71531a85b03d27ac5f80dd7e331b1f33839c20"} Dec 01 20:27:05 crc kubenswrapper[4852]: I1201 20:27:05.049243 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:27:05 crc kubenswrapper[4852]: I1201 20:27:05.086287 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.086261499 podStartE2EDuration="38.086261499s" podCreationTimestamp="2025-12-01 20:26:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:27:05.084488812 +0000 UTC m=+1345.011570289" watchObservedRunningTime="2025-12-01 20:27:05.086261499 +0000 UTC m=+1345.013342916" Dec 01 20:27:05 crc kubenswrapper[4852]: I1201 20:27:05.127262 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.127236128 podStartE2EDuration="37.127236128s" podCreationTimestamp="2025-12-01 20:26:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:27:05.116790347 +0000 UTC m=+1345.043871804" watchObservedRunningTime="2025-12-01 20:27:05.127236128 +0000 UTC m=+1345.054317555" Dec 01 20:27:05 crc kubenswrapper[4852]: W1201 20:27:05.249875 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3edb936c_fcd0_4599_9c43_6ed0a4b957c4.slice/crio-5a15c0bf5cbb00b18bf721f9f68c5313c4a45988598563b5014f393998a88cb1 WatchSource:0}: Error finding container 5a15c0bf5cbb00b18bf721f9f68c5313c4a45988598563b5014f393998a88cb1: Status 404 returned error can't find the container with id 5a15c0bf5cbb00b18bf721f9f68c5313c4a45988598563b5014f393998a88cb1 Dec 01 20:27:05 crc kubenswrapper[4852]: I1201 20:27:05.250512 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6"] Dec 01 20:27:05 crc kubenswrapper[4852]: I1201 20:27:05.253441 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:27:06 crc kubenswrapper[4852]: I1201 20:27:06.059963 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" event={"ID":"3edb936c-fcd0-4599-9c43-6ed0a4b957c4","Type":"ContainerStarted","Data":"5a15c0bf5cbb00b18bf721f9f68c5313c4a45988598563b5014f393998a88cb1"} Dec 01 20:27:14 crc kubenswrapper[4852]: I1201 20:27:14.148150 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" event={"ID":"3edb936c-fcd0-4599-9c43-6ed0a4b957c4","Type":"ContainerStarted","Data":"141ec1fe24b9b929ec60bfa58637fc19371e578694d1d1d02104ce3e1ac2e67b"} Dec 01 20:27:14 crc kubenswrapper[4852]: I1201 20:27:14.170510 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" podStartSLOduration=2.080962579 podStartE2EDuration="10.170489186s" podCreationTimestamp="2025-12-01 20:27:04 +0000 UTC" firstStartedPulling="2025-12-01 20:27:05.253154739 +0000 UTC m=+1345.180236156" lastFinishedPulling="2025-12-01 20:27:13.342681306 +0000 UTC m=+1353.269762763" observedRunningTime="2025-12-01 20:27:14.167431659 +0000 UTC m=+1354.094513086" watchObservedRunningTime="2025-12-01 20:27:14.170489186 +0000 UTC m=+1354.097570623" Dec 01 20:27:18 crc kubenswrapper[4852]: I1201 20:27:18.003724 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 01 20:27:18 crc kubenswrapper[4852]: I1201 20:27:18.949718 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 01 20:27:20 crc kubenswrapper[4852]: I1201 20:27:20.235674 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:27:20 crc kubenswrapper[4852]: I1201 20:27:20.235942 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:27:20 crc kubenswrapper[4852]: I1201 20:27:20.235988 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:27:20 crc kubenswrapper[4852]: I1201 20:27:20.236778 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f8b1918be0b9d57d53db2b9e0a1f8d939b6b8e07171e1e5c9e19c272dc47420a"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:27:20 crc kubenswrapper[4852]: I1201 20:27:20.236826 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://f8b1918be0b9d57d53db2b9e0a1f8d939b6b8e07171e1e5c9e19c272dc47420a" gracePeriod=600 Dec 01 20:27:21 crc kubenswrapper[4852]: I1201 20:27:21.231177 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="f8b1918be0b9d57d53db2b9e0a1f8d939b6b8e07171e1e5c9e19c272dc47420a" exitCode=0 Dec 01 20:27:21 crc kubenswrapper[4852]: I1201 20:27:21.231233 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" 
event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"f8b1918be0b9d57d53db2b9e0a1f8d939b6b8e07171e1e5c9e19c272dc47420a"} Dec 01 20:27:21 crc kubenswrapper[4852]: I1201 20:27:21.231817 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89"} Dec 01 20:27:21 crc kubenswrapper[4852]: I1201 20:27:21.231844 4852 scope.go:117] "RemoveContainer" containerID="4f3ce873c2afbf23e359eb217337f90c2f601b26a34a306541975911addf4c32" Dec 01 20:27:25 crc kubenswrapper[4852]: I1201 20:27:25.291118 4852 generic.go:334] "Generic (PLEG): container finished" podID="3edb936c-fcd0-4599-9c43-6ed0a4b957c4" containerID="141ec1fe24b9b929ec60bfa58637fc19371e578694d1d1d02104ce3e1ac2e67b" exitCode=0 Dec 01 20:27:25 crc kubenswrapper[4852]: I1201 20:27:25.291215 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" event={"ID":"3edb936c-fcd0-4599-9c43-6ed0a4b957c4","Type":"ContainerDied","Data":"141ec1fe24b9b929ec60bfa58637fc19371e578694d1d1d02104ce3e1ac2e67b"} Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.743374 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.858820 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-repo-setup-combined-ca-bundle\") pod \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.858994 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-inventory\") pod \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.859124 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhnp4\" (UniqueName: \"kubernetes.io/projected/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-kube-api-access-hhnp4\") pod \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.859271 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-ssh-key\") pod \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\" (UID: \"3edb936c-fcd0-4599-9c43-6ed0a4b957c4\") " Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.867145 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-kube-api-access-hhnp4" (OuterVolumeSpecName: "kube-api-access-hhnp4") pod "3edb936c-fcd0-4599-9c43-6ed0a4b957c4" (UID: "3edb936c-fcd0-4599-9c43-6ed0a4b957c4"). InnerVolumeSpecName "kube-api-access-hhnp4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.867709 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "3edb936c-fcd0-4599-9c43-6ed0a4b957c4" (UID: "3edb936c-fcd0-4599-9c43-6ed0a4b957c4"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.893130 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-inventory" (OuterVolumeSpecName: "inventory") pod "3edb936c-fcd0-4599-9c43-6ed0a4b957c4" (UID: "3edb936c-fcd0-4599-9c43-6ed0a4b957c4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.895685 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3edb936c-fcd0-4599-9c43-6ed0a4b957c4" (UID: "3edb936c-fcd0-4599-9c43-6ed0a4b957c4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.962024 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.962059 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhnp4\" (UniqueName: \"kubernetes.io/projected/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-kube-api-access-hhnp4\") on node \"crc\" DevicePath \"\"" Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.962072 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:27:26 crc kubenswrapper[4852]: I1201 20:27:26.962085 4852 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3edb936c-fcd0-4599-9c43-6ed0a4b957c4-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.323684 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" event={"ID":"3edb936c-fcd0-4599-9c43-6ed0a4b957c4","Type":"ContainerDied","Data":"5a15c0bf5cbb00b18bf721f9f68c5313c4a45988598563b5014f393998a88cb1"} Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.323753 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a15c0bf5cbb00b18bf721f9f68c5313c4a45988598563b5014f393998a88cb1" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.323796 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.501512 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn"] Dec 01 20:27:27 crc kubenswrapper[4852]: E1201 20:27:27.502606 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3edb936c-fcd0-4599-9c43-6ed0a4b957c4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.502740 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="3edb936c-fcd0-4599-9c43-6ed0a4b957c4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.503072 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="3edb936c-fcd0-4599-9c43-6ed0a4b957c4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.504014 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.506747 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.507511 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.507723 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.509255 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.533563 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn"] Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.577443 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-26vmn\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.577862 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvngm\" (UniqueName: \"kubernetes.io/projected/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-kube-api-access-pvngm\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-26vmn\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.578051 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-26vmn\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.680566 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-pvngm\" (UniqueName: \"kubernetes.io/projected/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-kube-api-access-pvngm\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-26vmn\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.680674 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-26vmn\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.680756 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-26vmn\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.685519 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-26vmn\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.686645 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-26vmn\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.698199 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvngm\" (UniqueName: \"kubernetes.io/projected/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-kube-api-access-pvngm\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-26vmn\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:27 crc kubenswrapper[4852]: I1201 20:27:27.846240 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:28 crc kubenswrapper[4852]: W1201 20:27:28.442888 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79e71e37_cc0d_42e9_89dd_9cb4722aa53a.slice/crio-8b9d89e18c5a338ed2c9d51778e166d3feefb75b1630ee95ea82df03142b5f00 WatchSource:0}: Error finding container 8b9d89e18c5a338ed2c9d51778e166d3feefb75b1630ee95ea82df03142b5f00: Status 404 returned error can't find the container with id 8b9d89e18c5a338ed2c9d51778e166d3feefb75b1630ee95ea82df03142b5f00 Dec 01 20:27:28 crc kubenswrapper[4852]: I1201 20:27:28.446761 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn"] Dec 01 20:27:29 crc kubenswrapper[4852]: I1201 20:27:29.350418 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" event={"ID":"79e71e37-cc0d-42e9-89dd-9cb4722aa53a","Type":"ContainerStarted","Data":"8b9d89e18c5a338ed2c9d51778e166d3feefb75b1630ee95ea82df03142b5f00"} Dec 01 20:27:30 crc kubenswrapper[4852]: I1201 20:27:30.372580 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" event={"ID":"79e71e37-cc0d-42e9-89dd-9cb4722aa53a","Type":"ContainerStarted","Data":"80b9741e802f350e63c21d66d7841a72479e7be96311296407ad9293b288fe1d"} Dec 01 20:27:30 crc kubenswrapper[4852]: I1201 20:27:30.403602 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" podStartSLOduration=2.690174775 podStartE2EDuration="3.403580624s" podCreationTimestamp="2025-12-01 20:27:27 +0000 UTC" firstStartedPulling="2025-12-01 20:27:28.445820938 +0000 UTC m=+1368.372902365" lastFinishedPulling="2025-12-01 20:27:29.159226787 +0000 UTC m=+1369.086308214" observedRunningTime="2025-12-01 20:27:30.391421773 +0000 UTC m=+1370.318503190" watchObservedRunningTime="2025-12-01 20:27:30.403580624 +0000 UTC m=+1370.330662041" Dec 01 20:27:32 crc kubenswrapper[4852]: I1201 20:27:32.396652 4852 generic.go:334] "Generic (PLEG): container finished" podID="79e71e37-cc0d-42e9-89dd-9cb4722aa53a" containerID="80b9741e802f350e63c21d66d7841a72479e7be96311296407ad9293b288fe1d" exitCode=0 Dec 01 20:27:32 crc kubenswrapper[4852]: I1201 20:27:32.396841 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" event={"ID":"79e71e37-cc0d-42e9-89dd-9cb4722aa53a","Type":"ContainerDied","Data":"80b9741e802f350e63c21d66d7841a72479e7be96311296407ad9293b288fe1d"} Dec 01 20:27:33 crc kubenswrapper[4852]: I1201 20:27:33.835301 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:33 crc kubenswrapper[4852]: I1201 20:27:33.917218 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvngm\" (UniqueName: \"kubernetes.io/projected/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-kube-api-access-pvngm\") pod \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " Dec 01 20:27:33 crc kubenswrapper[4852]: I1201 20:27:33.917405 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-inventory\") pod \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " Dec 01 20:27:33 crc kubenswrapper[4852]: I1201 20:27:33.917469 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-ssh-key\") pod \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\" (UID: \"79e71e37-cc0d-42e9-89dd-9cb4722aa53a\") " Dec 01 20:27:33 crc kubenswrapper[4852]: I1201 20:27:33.922860 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-kube-api-access-pvngm" (OuterVolumeSpecName: "kube-api-access-pvngm") pod "79e71e37-cc0d-42e9-89dd-9cb4722aa53a" (UID: "79e71e37-cc0d-42e9-89dd-9cb4722aa53a"). InnerVolumeSpecName "kube-api-access-pvngm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:27:33 crc kubenswrapper[4852]: I1201 20:27:33.955626 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "79e71e37-cc0d-42e9-89dd-9cb4722aa53a" (UID: "79e71e37-cc0d-42e9-89dd-9cb4722aa53a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:27:33 crc kubenswrapper[4852]: I1201 20:27:33.961446 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-inventory" (OuterVolumeSpecName: "inventory") pod "79e71e37-cc0d-42e9-89dd-9cb4722aa53a" (UID: "79e71e37-cc0d-42e9-89dd-9cb4722aa53a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.019575 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvngm\" (UniqueName: \"kubernetes.io/projected/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-kube-api-access-pvngm\") on node \"crc\" DevicePath \"\"" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.019618 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.019630 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/79e71e37-cc0d-42e9-89dd-9cb4722aa53a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.422323 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" event={"ID":"79e71e37-cc0d-42e9-89dd-9cb4722aa53a","Type":"ContainerDied","Data":"8b9d89e18c5a338ed2c9d51778e166d3feefb75b1630ee95ea82df03142b5f00"} Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.422395 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b9d89e18c5a338ed2c9d51778e166d3feefb75b1630ee95ea82df03142b5f00" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.422395 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-26vmn" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.505777 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm"] Dec 01 20:27:34 crc kubenswrapper[4852]: E1201 20:27:34.506663 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79e71e37-cc0d-42e9-89dd-9cb4722aa53a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.506687 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="79e71e37-cc0d-42e9-89dd-9cb4722aa53a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.507028 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="79e71e37-cc0d-42e9-89dd-9cb4722aa53a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.507996 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.511765 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.511814 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.512185 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.513044 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.515709 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm"] Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.535185 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.535310 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.535369 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d27pv\" (UniqueName: \"kubernetes.io/projected/03d266a8-6787-4bc8-8836-d11fb0d078b4-kube-api-access-d27pv\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.535537 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.637639 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.637801 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-bootstrap-combined-ca-bundle\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.637916 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.638004 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d27pv\" (UniqueName: \"kubernetes.io/projected/03d266a8-6787-4bc8-8836-d11fb0d078b4-kube-api-access-d27pv\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.643707 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.644336 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.645116 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.661390 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d27pv\" (UniqueName: \"kubernetes.io/projected/03d266a8-6787-4bc8-8836-d11fb0d078b4-kube-api-access-d27pv\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:34 crc kubenswrapper[4852]: I1201 20:27:34.834442 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:27:35 crc kubenswrapper[4852]: I1201 20:27:35.550356 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm"] Dec 01 20:27:36 crc kubenswrapper[4852]: I1201 20:27:36.465832 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" event={"ID":"03d266a8-6787-4bc8-8836-d11fb0d078b4","Type":"ContainerStarted","Data":"eaa194d0496ed45f825031ac0e002b4608f490edacd0455e30111a6c129880fb"} Dec 01 20:27:37 crc kubenswrapper[4852]: I1201 20:27:37.481472 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" event={"ID":"03d266a8-6787-4bc8-8836-d11fb0d078b4","Type":"ContainerStarted","Data":"33aba7c803cadb73ec22c694069664884d82f889dd19ba4c1fc5b463f4cf8eaf"} Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.127529 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" podStartSLOduration=3.4944308299999998 podStartE2EDuration="4.12750662s" podCreationTimestamp="2025-12-01 20:27:34 +0000 UTC" firstStartedPulling="2025-12-01 20:27:35.552320944 +0000 UTC m=+1375.479402361" lastFinishedPulling="2025-12-01 20:27:36.185396694 +0000 UTC m=+1376.112478151" observedRunningTime="2025-12-01 20:27:37.523069177 +0000 UTC m=+1377.450150634" watchObservedRunningTime="2025-12-01 20:27:38.12750662 +0000 UTC m=+1378.054588047" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.139343 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z27pn"] Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.142218 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.155810 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z27pn"] Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.242378 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-catalog-content\") pod \"redhat-operators-z27pn\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.242472 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-utilities\") pod \"redhat-operators-z27pn\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.242511 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w782\" (UniqueName: \"kubernetes.io/projected/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-kube-api-access-5w782\") pod \"redhat-operators-z27pn\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.344113 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-catalog-content\") pod \"redhat-operators-z27pn\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.344508 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-utilities\") pod \"redhat-operators-z27pn\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.344659 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w782\" (UniqueName: \"kubernetes.io/projected/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-kube-api-access-5w782\") pod \"redhat-operators-z27pn\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.344709 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-catalog-content\") pod \"redhat-operators-z27pn\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.345053 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-utilities\") pod \"redhat-operators-z27pn\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.365504 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5w782\" (UniqueName: \"kubernetes.io/projected/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-kube-api-access-5w782\") pod \"redhat-operators-z27pn\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:38 crc kubenswrapper[4852]: I1201 20:27:38.468303 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:40 crc kubenswrapper[4852]: I1201 20:27:40.156686 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z27pn"] Dec 01 20:27:40 crc kubenswrapper[4852]: I1201 20:27:40.515225 4852 generic.go:334] "Generic (PLEG): container finished" podID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerID="121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2" exitCode=0 Dec 01 20:27:40 crc kubenswrapper[4852]: I1201 20:27:40.515314 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27pn" event={"ID":"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a","Type":"ContainerDied","Data":"121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2"} Dec 01 20:27:40 crc kubenswrapper[4852]: I1201 20:27:40.515595 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27pn" event={"ID":"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a","Type":"ContainerStarted","Data":"1efca837c8074da97173089b18d3315c039a1ea3123bd470547e9e424c8ba81c"} Dec 01 20:27:42 crc kubenswrapper[4852]: I1201 20:27:42.536539 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27pn" event={"ID":"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a","Type":"ContainerStarted","Data":"6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff"} Dec 01 20:27:44 crc kubenswrapper[4852]: I1201 20:27:44.557138 4852 generic.go:334] "Generic (PLEG): container finished" podID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerID="6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff" exitCode=0 Dec 01 20:27:44 crc kubenswrapper[4852]: I1201 20:27:44.557249 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27pn" event={"ID":"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a","Type":"ContainerDied","Data":"6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff"} Dec 01 20:27:47 crc kubenswrapper[4852]: I1201 20:27:47.590333 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27pn" event={"ID":"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a","Type":"ContainerStarted","Data":"2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c"} Dec 01 20:27:47 crc kubenswrapper[4852]: I1201 20:27:47.617869 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z27pn" podStartSLOduration=3.537093368 podStartE2EDuration="9.617850903s" podCreationTimestamp="2025-12-01 20:27:38 +0000 UTC" firstStartedPulling="2025-12-01 20:27:40.517008953 +0000 UTC m=+1380.444090380" lastFinishedPulling="2025-12-01 20:27:46.597766498 +0000 UTC m=+1386.524847915" observedRunningTime="2025-12-01 20:27:47.615424637 +0000 UTC m=+1387.542506074" watchObservedRunningTime="2025-12-01 20:27:47.617850903 +0000 UTC m=+1387.544932310" Dec 01 20:27:48 crc kubenswrapper[4852]: I1201 20:27:48.468649 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 
20:27:48 crc kubenswrapper[4852]: I1201 20:27:48.469157 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:49 crc kubenswrapper[4852]: I1201 20:27:49.584256 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z27pn" podUID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerName="registry-server" probeResult="failure" output=< Dec 01 20:27:49 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s Dec 01 20:27:49 crc kubenswrapper[4852]: > Dec 01 20:27:58 crc kubenswrapper[4852]: I1201 20:27:58.559076 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:58 crc kubenswrapper[4852]: I1201 20:27:58.655919 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:27:58 crc kubenswrapper[4852]: I1201 20:27:58.828166 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z27pn"] Dec 01 20:27:59 crc kubenswrapper[4852]: I1201 20:27:59.738825 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z27pn" podUID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerName="registry-server" containerID="cri-o://2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c" gracePeriod=2 Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.334831 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.446183 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-catalog-content\") pod \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.446439 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5w782\" (UniqueName: \"kubernetes.io/projected/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-kube-api-access-5w782\") pod \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.446499 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-utilities\") pod \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\" (UID: \"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a\") " Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.453516 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-utilities" (OuterVolumeSpecName: "utilities") pod "cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" (UID: "cb6d9cf1-6435-4ba2-8d21-dd5f803b043a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.457568 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-kube-api-access-5w782" (OuterVolumeSpecName: "kube-api-access-5w782") pod "cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" (UID: "cb6d9cf1-6435-4ba2-8d21-dd5f803b043a"). InnerVolumeSpecName "kube-api-access-5w782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.549135 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5w782\" (UniqueName: \"kubernetes.io/projected/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-kube-api-access-5w782\") on node \"crc\" DevicePath \"\"" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.549361 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.591668 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" (UID: "cb6d9cf1-6435-4ba2-8d21-dd5f803b043a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.651570 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.751716 4852 generic.go:334] "Generic (PLEG): container finished" podID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerID="2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c" exitCode=0 Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.751760 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27pn" event={"ID":"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a","Type":"ContainerDied","Data":"2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c"} Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.751785 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27pn" event={"ID":"cb6d9cf1-6435-4ba2-8d21-dd5f803b043a","Type":"ContainerDied","Data":"1efca837c8074da97173089b18d3315c039a1ea3123bd470547e9e424c8ba81c"} Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.751804 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z27pn" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.751822 4852 scope.go:117] "RemoveContainer" containerID="2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.771474 4852 scope.go:117] "RemoveContainer" containerID="6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.795655 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z27pn"] Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.805688 4852 scope.go:117] "RemoveContainer" containerID="121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.806309 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z27pn"] Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.858570 4852 scope.go:117] "RemoveContainer" containerID="2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c" Dec 01 20:28:00 crc kubenswrapper[4852]: E1201 20:28:00.859138 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c\": container with ID starting with 2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c not found: ID does not exist" containerID="2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.859191 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c"} err="failed to get container status \"2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c\": rpc error: code = NotFound desc = could not find container \"2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c\": container with ID starting with 2733ac62441c28eb5da6c98d4652391443f295278ec7dc5edc6970c717b15f3c not found: ID does not exist" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.859218 4852 scope.go:117] "RemoveContainer" containerID="6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff" Dec 01 20:28:00 crc kubenswrapper[4852]: E1201 20:28:00.859669 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff\": container with ID starting with 6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff not found: ID does not exist" containerID="6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.859705 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff"} err="failed to get container status \"6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff\": rpc error: code = NotFound desc = could not find container \"6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff\": container with ID starting with 6241ef07fd36c2acb5cae0e58d791a53dbe7dbeaa486b06ea4fbb1b2149c37ff not found: ID does not exist" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.859729 4852 scope.go:117] "RemoveContainer" 
containerID="121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2" Dec 01 20:28:00 crc kubenswrapper[4852]: E1201 20:28:00.860015 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2\": container with ID starting with 121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2 not found: ID does not exist" containerID="121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2" Dec 01 20:28:00 crc kubenswrapper[4852]: I1201 20:28:00.860041 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2"} err="failed to get container status \"121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2\": rpc error: code = NotFound desc = could not find container \"121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2\": container with ID starting with 121bc681c9316a56f9f00298f7d1e4d14ae71aeeb21feaccd88e3b745bdf25b2 not found: ID does not exist" Dec 01 20:28:02 crc kubenswrapper[4852]: I1201 20:28:02.335268 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" path="/var/lib/kubelet/pods/cb6d9cf1-6435-4ba2-8d21-dd5f803b043a/volumes" Dec 01 20:28:54 crc kubenswrapper[4852]: I1201 20:28:54.877141 4852 scope.go:117] "RemoveContainer" containerID="62fc2e6867cf2cdbefdb5687379aa7408afaafc9da417a686f338f6a58f912e2" Dec 01 20:28:54 crc kubenswrapper[4852]: I1201 20:28:54.903720 4852 scope.go:117] "RemoveContainer" containerID="cd3cd2fe4272140c28b9d4dbc5c83e3f656d8dfc26b71ddf8a255959aab5aa06" Dec 01 20:28:54 crc kubenswrapper[4852]: I1201 20:28:54.926912 4852 scope.go:117] "RemoveContainer" containerID="708bc0d25bab9090fcc2fea698be48d59f3abb410de4a18ae9633f7a71c6f6d0" Dec 01 20:28:54 crc kubenswrapper[4852]: I1201 20:28:54.989740 4852 scope.go:117] "RemoveContainer" containerID="ba8bed2cd04a2735dd513c523a66c1c0e173664b72404bbfce476806f8aebcf6" Dec 01 20:28:55 crc kubenswrapper[4852]: I1201 20:28:55.032179 4852 scope.go:117] "RemoveContainer" containerID="527408c5ce0bc6d7ccdf8972392efd9fd18b13e5b12687f10cd2173a391268db" Dec 01 20:28:55 crc kubenswrapper[4852]: I1201 20:28:55.074523 4852 scope.go:117] "RemoveContainer" containerID="f452cda2ec576d4e0c3a7fc42330501f5d02a2af0de2d78ea32148fcb30aca98" Dec 01 20:28:55 crc kubenswrapper[4852]: I1201 20:28:55.113760 4852 scope.go:117] "RemoveContainer" containerID="1966432c165e2d879fa973f536e9b94fe0f1fa19b654677eccc855f2b3d20c3a" Dec 01 20:28:55 crc kubenswrapper[4852]: I1201 20:28:55.153884 4852 scope.go:117] "RemoveContainer" containerID="1f900b176e494dab4b9de7ba7df62acc2d5a222173b1798070aa89420bfd73d5" Dec 01 20:29:20 crc kubenswrapper[4852]: I1201 20:29:20.231223 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:29:20 crc kubenswrapper[4852]: I1201 20:29:20.232129 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Dec 01 20:29:50 crc kubenswrapper[4852]: I1201 20:29:50.231697 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:29:50 crc kubenswrapper[4852]: I1201 20:29:50.232181 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:29:54 crc kubenswrapper[4852]: I1201 20:29:54.946553 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n7ql9"] Dec 01 20:29:54 crc kubenswrapper[4852]: E1201 20:29:54.948189 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerName="registry-server" Dec 01 20:29:54 crc kubenswrapper[4852]: I1201 20:29:54.948231 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerName="registry-server" Dec 01 20:29:54 crc kubenswrapper[4852]: E1201 20:29:54.948302 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerName="extract-content" Dec 01 20:29:54 crc kubenswrapper[4852]: I1201 20:29:54.948312 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerName="extract-content" Dec 01 20:29:54 crc kubenswrapper[4852]: E1201 20:29:54.948339 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerName="extract-utilities" Dec 01 20:29:54 crc kubenswrapper[4852]: I1201 20:29:54.948348 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerName="extract-utilities" Dec 01 20:29:54 crc kubenswrapper[4852]: I1201 20:29:54.948760 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb6d9cf1-6435-4ba2-8d21-dd5f803b043a" containerName="registry-server" Dec 01 20:29:54 crc kubenswrapper[4852]: I1201 20:29:54.950412 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:54 crc kubenswrapper[4852]: I1201 20:29:54.961515 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n7ql9"] Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.118399 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-catalog-content\") pod \"community-operators-n7ql9\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.118569 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-utilities\") pod \"community-operators-n7ql9\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.118603 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9hgz\" (UniqueName: \"kubernetes.io/projected/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-kube-api-access-m9hgz\") pod \"community-operators-n7ql9\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.220918 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-catalog-content\") pod \"community-operators-n7ql9\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.221003 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-utilities\") pod \"community-operators-n7ql9\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.221048 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9hgz\" (UniqueName: \"kubernetes.io/projected/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-kube-api-access-m9hgz\") pod \"community-operators-n7ql9\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.221897 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-catalog-content\") pod \"community-operators-n7ql9\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.221943 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-utilities\") pod \"community-operators-n7ql9\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.244572 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-m9hgz\" (UniqueName: \"kubernetes.io/projected/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-kube-api-access-m9hgz\") pod \"community-operators-n7ql9\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.279877 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:29:55 crc kubenswrapper[4852]: I1201 20:29:55.850342 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n7ql9"] Dec 01 20:29:56 crc kubenswrapper[4852]: I1201 20:29:56.255549 4852 generic.go:334] "Generic (PLEG): container finished" podID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerID="0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2" exitCode=0 Dec 01 20:29:56 crc kubenswrapper[4852]: I1201 20:29:56.255630 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7ql9" event={"ID":"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb","Type":"ContainerDied","Data":"0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2"} Dec 01 20:29:56 crc kubenswrapper[4852]: I1201 20:29:56.255689 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7ql9" event={"ID":"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb","Type":"ContainerStarted","Data":"1946391bdf7a2787ed8f23470a072bac9d82aefaf2559e845ef7e7fb9cbb2b31"} Dec 01 20:29:58 crc kubenswrapper[4852]: I1201 20:29:58.283154 4852 generic.go:334] "Generic (PLEG): container finished" podID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerID="a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df" exitCode=0 Dec 01 20:29:58 crc kubenswrapper[4852]: I1201 20:29:58.283211 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7ql9" event={"ID":"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb","Type":"ContainerDied","Data":"a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df"} Dec 01 20:29:59 crc kubenswrapper[4852]: I1201 20:29:59.301487 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7ql9" event={"ID":"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb","Type":"ContainerStarted","Data":"8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404"} Dec 01 20:29:59 crc kubenswrapper[4852]: I1201 20:29:59.326634 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n7ql9" podStartSLOduration=2.656620798 podStartE2EDuration="5.326615077s" podCreationTimestamp="2025-12-01 20:29:54 +0000 UTC" firstStartedPulling="2025-12-01 20:29:56.259217387 +0000 UTC m=+1516.186298804" lastFinishedPulling="2025-12-01 20:29:58.929211636 +0000 UTC m=+1518.856293083" observedRunningTime="2025-12-01 20:29:59.323445667 +0000 UTC m=+1519.250527084" watchObservedRunningTime="2025-12-01 20:29:59.326615077 +0000 UTC m=+1519.253696494" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.160058 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q"] Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.162017 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.174292 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.174738 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.183992 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q"] Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.229976 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dbbe5ad8-65e3-4453-b3a2-335195a08269-config-volume\") pod \"collect-profiles-29410350-6666q\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.230104 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dbbe5ad8-65e3-4453-b3a2-335195a08269-secret-volume\") pod \"collect-profiles-29410350-6666q\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.230165 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrj5n\" (UniqueName: \"kubernetes.io/projected/dbbe5ad8-65e3-4453-b3a2-335195a08269-kube-api-access-xrj5n\") pod \"collect-profiles-29410350-6666q\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.332537 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dbbe5ad8-65e3-4453-b3a2-335195a08269-config-volume\") pod \"collect-profiles-29410350-6666q\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.332671 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dbbe5ad8-65e3-4453-b3a2-335195a08269-secret-volume\") pod \"collect-profiles-29410350-6666q\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.332736 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrj5n\" (UniqueName: \"kubernetes.io/projected/dbbe5ad8-65e3-4453-b3a2-335195a08269-kube-api-access-xrj5n\") pod \"collect-profiles-29410350-6666q\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.333349 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dbbe5ad8-65e3-4453-b3a2-335195a08269-config-volume\") pod 
\"collect-profiles-29410350-6666q\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.341241 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dbbe5ad8-65e3-4453-b3a2-335195a08269-secret-volume\") pod \"collect-profiles-29410350-6666q\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.351346 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrj5n\" (UniqueName: \"kubernetes.io/projected/dbbe5ad8-65e3-4453-b3a2-335195a08269-kube-api-access-xrj5n\") pod \"collect-profiles-29410350-6666q\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:00 crc kubenswrapper[4852]: I1201 20:30:00.481411 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:01 crc kubenswrapper[4852]: I1201 20:30:01.025898 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q"] Dec 01 20:30:01 crc kubenswrapper[4852]: I1201 20:30:01.323169 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" event={"ID":"dbbe5ad8-65e3-4453-b3a2-335195a08269","Type":"ContainerStarted","Data":"62dc6f8439dcc4dc8a25bbeff1400744884e1a7bc33297e8ee9ca9a0d63cea0d"} Dec 01 20:30:01 crc kubenswrapper[4852]: I1201 20:30:01.323207 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" event={"ID":"dbbe5ad8-65e3-4453-b3a2-335195a08269","Type":"ContainerStarted","Data":"a93d4b84da4c3d7586eace8d71dff55c3c24338416179e2b14ba3161ff68b958"} Dec 01 20:30:02 crc kubenswrapper[4852]: I1201 20:30:02.336232 4852 generic.go:334] "Generic (PLEG): container finished" podID="dbbe5ad8-65e3-4453-b3a2-335195a08269" containerID="62dc6f8439dcc4dc8a25bbeff1400744884e1a7bc33297e8ee9ca9a0d63cea0d" exitCode=0 Dec 01 20:30:02 crc kubenswrapper[4852]: I1201 20:30:02.343593 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" event={"ID":"dbbe5ad8-65e3-4453-b3a2-335195a08269","Type":"ContainerDied","Data":"62dc6f8439dcc4dc8a25bbeff1400744884e1a7bc33297e8ee9ca9a0d63cea0d"} Dec 01 20:30:03 crc kubenswrapper[4852]: I1201 20:30:03.823393 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:03 crc kubenswrapper[4852]: I1201 20:30:03.914360 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dbbe5ad8-65e3-4453-b3a2-335195a08269-secret-volume\") pod \"dbbe5ad8-65e3-4453-b3a2-335195a08269\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " Dec 01 20:30:03 crc kubenswrapper[4852]: I1201 20:30:03.914483 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrj5n\" (UniqueName: \"kubernetes.io/projected/dbbe5ad8-65e3-4453-b3a2-335195a08269-kube-api-access-xrj5n\") pod \"dbbe5ad8-65e3-4453-b3a2-335195a08269\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " Dec 01 20:30:03 crc kubenswrapper[4852]: I1201 20:30:03.914535 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dbbe5ad8-65e3-4453-b3a2-335195a08269-config-volume\") pod \"dbbe5ad8-65e3-4453-b3a2-335195a08269\" (UID: \"dbbe5ad8-65e3-4453-b3a2-335195a08269\") " Dec 01 20:30:03 crc kubenswrapper[4852]: I1201 20:30:03.915646 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbbe5ad8-65e3-4453-b3a2-335195a08269-config-volume" (OuterVolumeSpecName: "config-volume") pod "dbbe5ad8-65e3-4453-b3a2-335195a08269" (UID: "dbbe5ad8-65e3-4453-b3a2-335195a08269"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:30:03 crc kubenswrapper[4852]: I1201 20:30:03.922023 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbbe5ad8-65e3-4453-b3a2-335195a08269-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "dbbe5ad8-65e3-4453-b3a2-335195a08269" (UID: "dbbe5ad8-65e3-4453-b3a2-335195a08269"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:30:03 crc kubenswrapper[4852]: I1201 20:30:03.930262 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbbe5ad8-65e3-4453-b3a2-335195a08269-kube-api-access-xrj5n" (OuterVolumeSpecName: "kube-api-access-xrj5n") pod "dbbe5ad8-65e3-4453-b3a2-335195a08269" (UID: "dbbe5ad8-65e3-4453-b3a2-335195a08269"). InnerVolumeSpecName "kube-api-access-xrj5n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:30:04 crc kubenswrapper[4852]: I1201 20:30:04.017858 4852 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dbbe5ad8-65e3-4453-b3a2-335195a08269-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:04 crc kubenswrapper[4852]: I1201 20:30:04.017909 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrj5n\" (UniqueName: \"kubernetes.io/projected/dbbe5ad8-65e3-4453-b3a2-335195a08269-kube-api-access-xrj5n\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:04 crc kubenswrapper[4852]: I1201 20:30:04.017928 4852 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dbbe5ad8-65e3-4453-b3a2-335195a08269-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:04 crc kubenswrapper[4852]: I1201 20:30:04.368775 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" event={"ID":"dbbe5ad8-65e3-4453-b3a2-335195a08269","Type":"ContainerDied","Data":"a93d4b84da4c3d7586eace8d71dff55c3c24338416179e2b14ba3161ff68b958"} Dec 01 20:30:04 crc kubenswrapper[4852]: I1201 20:30:04.368826 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a93d4b84da4c3d7586eace8d71dff55c3c24338416179e2b14ba3161ff68b958" Dec 01 20:30:04 crc kubenswrapper[4852]: I1201 20:30:04.368904 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q" Dec 01 20:30:05 crc kubenswrapper[4852]: I1201 20:30:05.280135 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:30:05 crc kubenswrapper[4852]: I1201 20:30:05.280822 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:30:05 crc kubenswrapper[4852]: I1201 20:30:05.361576 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:30:05 crc kubenswrapper[4852]: I1201 20:30:05.438335 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:30:05 crc kubenswrapper[4852]: I1201 20:30:05.620522 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n7ql9"] Dec 01 20:30:07 crc kubenswrapper[4852]: I1201 20:30:07.441366 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n7ql9" podUID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerName="registry-server" containerID="cri-o://8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404" gracePeriod=2 Dec 01 20:30:07 crc kubenswrapper[4852]: I1201 20:30:07.995706 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.144590 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9hgz\" (UniqueName: \"kubernetes.io/projected/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-kube-api-access-m9hgz\") pod \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.144641 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-utilities\") pod \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.144863 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-catalog-content\") pod \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\" (UID: \"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb\") " Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.146120 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-utilities" (OuterVolumeSpecName: "utilities") pod "79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" (UID: "79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.151192 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-kube-api-access-m9hgz" (OuterVolumeSpecName: "kube-api-access-m9hgz") pod "79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" (UID: "79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb"). InnerVolumeSpecName "kube-api-access-m9hgz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.197418 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" (UID: "79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.247446 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9hgz\" (UniqueName: \"kubernetes.io/projected/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-kube-api-access-m9hgz\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.247499 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.247513 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.454942 4852 generic.go:334] "Generic (PLEG): container finished" podID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerID="8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404" exitCode=0 Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.454999 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7ql9" event={"ID":"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb","Type":"ContainerDied","Data":"8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404"} Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.455041 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7ql9" event={"ID":"79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb","Type":"ContainerDied","Data":"1946391bdf7a2787ed8f23470a072bac9d82aefaf2559e845ef7e7fb9cbb2b31"} Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.455064 4852 scope.go:117] "RemoveContainer" containerID="8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.456468 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n7ql9" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.492951 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n7ql9"] Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.495657 4852 scope.go:117] "RemoveContainer" containerID="a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.505925 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n7ql9"] Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.523724 4852 scope.go:117] "RemoveContainer" containerID="0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.567821 4852 scope.go:117] "RemoveContainer" containerID="8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404" Dec 01 20:30:08 crc kubenswrapper[4852]: E1201 20:30:08.568325 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404\": container with ID starting with 8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404 not found: ID does not exist" containerID="8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.568372 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404"} err="failed to get container status \"8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404\": rpc error: code = NotFound desc = could not find container \"8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404\": container with ID starting with 8814e58a84140cabb4f23ccf799f1ff190033684f07dbd69e937201589c3f404 not found: ID does not exist" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.568402 4852 scope.go:117] "RemoveContainer" containerID="a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df" Dec 01 20:30:08 crc kubenswrapper[4852]: E1201 20:30:08.568854 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df\": container with ID starting with a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df not found: ID does not exist" containerID="a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.568917 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df"} err="failed to get container status \"a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df\": rpc error: code = NotFound desc = could not find container \"a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df\": container with ID starting with a925e023513043f375975dfe33814876e3c8a636504236931bcedf6b8aa0b5df not found: ID does not exist" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.568951 4852 scope.go:117] "RemoveContainer" containerID="0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2" Dec 01 20:30:08 crc kubenswrapper[4852]: E1201 20:30:08.569285 4852 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2\": container with ID starting with 0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2 not found: ID does not exist" containerID="0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2" Dec 01 20:30:08 crc kubenswrapper[4852]: I1201 20:30:08.569344 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2"} err="failed to get container status \"0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2\": rpc error: code = NotFound desc = could not find container \"0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2\": container with ID starting with 0be16ebb92a9983fcd0e9d0433e67665c36afcb6dffeafc028668c41d35291b2 not found: ID does not exist" Dec 01 20:30:10 crc kubenswrapper[4852]: I1201 20:30:10.356145 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" path="/var/lib/kubelet/pods/79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb/volumes" Dec 01 20:30:20 crc kubenswrapper[4852]: I1201 20:30:20.230195 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:30:20 crc kubenswrapper[4852]: I1201 20:30:20.230629 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:30:20 crc kubenswrapper[4852]: I1201 20:30:20.230674 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:30:20 crc kubenswrapper[4852]: I1201 20:30:20.231322 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:30:20 crc kubenswrapper[4852]: I1201 20:30:20.231385 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" gracePeriod=600 Dec 01 20:30:20 crc kubenswrapper[4852]: E1201 20:30:20.395619 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:30:20 crc kubenswrapper[4852]: I1201 20:30:20.621275 4852 generic.go:334] 
"Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" exitCode=0 Dec 01 20:30:20 crc kubenswrapper[4852]: I1201 20:30:20.621351 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89"} Dec 01 20:30:20 crc kubenswrapper[4852]: I1201 20:30:20.621720 4852 scope.go:117] "RemoveContainer" containerID="f8b1918be0b9d57d53db2b9e0a1f8d939b6b8e07171e1e5c9e19c272dc47420a" Dec 01 20:30:20 crc kubenswrapper[4852]: I1201 20:30:20.622363 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:30:20 crc kubenswrapper[4852]: E1201 20:30:20.622684 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.320022 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:30:31 crc kubenswrapper[4852]: E1201 20:30:31.320890 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.888666 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x5hpj"] Dec 01 20:30:31 crc kubenswrapper[4852]: E1201 20:30:31.889160 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerName="extract-content" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.889179 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerName="extract-content" Dec 01 20:30:31 crc kubenswrapper[4852]: E1201 20:30:31.889216 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbbe5ad8-65e3-4453-b3a2-335195a08269" containerName="collect-profiles" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.889224 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbbe5ad8-65e3-4453-b3a2-335195a08269" containerName="collect-profiles" Dec 01 20:30:31 crc kubenswrapper[4852]: E1201 20:30:31.889235 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerName="registry-server" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.889241 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerName="registry-server" Dec 01 20:30:31 crc kubenswrapper[4852]: E1201 20:30:31.889266 4852 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerName="extract-utilities" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.889273 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerName="extract-utilities" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.889509 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbbe5ad8-65e3-4453-b3a2-335195a08269" containerName="collect-profiles" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.889568 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="79b4ad6c-a5c1-4b5f-a8a5-7c99f3cdf2eb" containerName="registry-server" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.891055 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.925860 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x5hpj"] Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.983102 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcvnd\" (UniqueName: \"kubernetes.io/projected/805639f3-263b-41b4-85ba-1fb9ced8eb46-kube-api-access-hcvnd\") pod \"certified-operators-x5hpj\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.983245 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-catalog-content\") pod \"certified-operators-x5hpj\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:31 crc kubenswrapper[4852]: I1201 20:30:31.983314 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-utilities\") pod \"certified-operators-x5hpj\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:32 crc kubenswrapper[4852]: I1201 20:30:32.085363 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-utilities\") pod \"certified-operators-x5hpj\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:32 crc kubenswrapper[4852]: I1201 20:30:32.085559 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcvnd\" (UniqueName: \"kubernetes.io/projected/805639f3-263b-41b4-85ba-1fb9ced8eb46-kube-api-access-hcvnd\") pod \"certified-operators-x5hpj\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:32 crc kubenswrapper[4852]: I1201 20:30:32.085639 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-catalog-content\") pod \"certified-operators-x5hpj\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:32 crc kubenswrapper[4852]: I1201 20:30:32.086317 4852 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-catalog-content\") pod \"certified-operators-x5hpj\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:32 crc kubenswrapper[4852]: I1201 20:30:32.086351 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-utilities\") pod \"certified-operators-x5hpj\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:32 crc kubenswrapper[4852]: I1201 20:30:32.106336 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcvnd\" (UniqueName: \"kubernetes.io/projected/805639f3-263b-41b4-85ba-1fb9ced8eb46-kube-api-access-hcvnd\") pod \"certified-operators-x5hpj\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:32 crc kubenswrapper[4852]: I1201 20:30:32.224285 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:32 crc kubenswrapper[4852]: I1201 20:30:32.718988 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x5hpj"] Dec 01 20:30:32 crc kubenswrapper[4852]: I1201 20:30:32.744109 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5hpj" event={"ID":"805639f3-263b-41b4-85ba-1fb9ced8eb46","Type":"ContainerStarted","Data":"af06096e11a3dc8bf780a4e944551ccd749acbaa0bd7675a5984d5c26c7c51b1"} Dec 01 20:30:33 crc kubenswrapper[4852]: I1201 20:30:33.756079 4852 generic.go:334] "Generic (PLEG): container finished" podID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerID="670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa" exitCode=0 Dec 01 20:30:33 crc kubenswrapper[4852]: I1201 20:30:33.756282 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5hpj" event={"ID":"805639f3-263b-41b4-85ba-1fb9ced8eb46","Type":"ContainerDied","Data":"670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa"} Dec 01 20:30:34 crc kubenswrapper[4852]: I1201 20:30:34.767114 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5hpj" event={"ID":"805639f3-263b-41b4-85ba-1fb9ced8eb46","Type":"ContainerStarted","Data":"a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed"} Dec 01 20:30:35 crc kubenswrapper[4852]: I1201 20:30:35.782806 4852 generic.go:334] "Generic (PLEG): container finished" podID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerID="a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed" exitCode=0 Dec 01 20:30:35 crc kubenswrapper[4852]: I1201 20:30:35.782879 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5hpj" event={"ID":"805639f3-263b-41b4-85ba-1fb9ced8eb46","Type":"ContainerDied","Data":"a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed"} Dec 01 20:30:37 crc kubenswrapper[4852]: I1201 20:30:37.802913 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5hpj" 
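
[annotation] The VerifyControllerAttachedVolume/MountVolume records above show the volume layout of a marketplace catalog pod: two emptyDir volumes ("utilities", "catalog-content") shared by the extract init containers and the registry-server container, plus a projected service-account token volume. A hedged client-go sketch of that shape follows; the images and mount paths are placeholders, not values from the actual CatalogSource-generated manifest.

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// catalogPod sketches the pod implied by the mount records: emptyDir volumes
// shared across init containers and the long-running registry-server.
func catalogPod() *corev1.Pod {
	mounts := []corev1.VolumeMount{
		{Name: "utilities", MountPath: "/utilities"},            // placeholder path
		{Name: "catalog-content", MountPath: "/extracted-catalog"}, // placeholder path
	}
	return &corev1.Pod{
		ObjectMeta: metav1.ObjectMeta{Name: "certified-operators-x5hpj", Namespace: "openshift-marketplace"},
		Spec: corev1.PodSpec{
			Volumes: []corev1.Volume{
				{Name: "utilities", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
				{Name: "catalog-content", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
			},
			InitContainers: []corev1.Container{
				{Name: "extract-utilities", Image: "placeholder/utilities", VolumeMounts: mounts},
				{Name: "extract-content", Image: "placeholder/catalog", VolumeMounts: mounts},
			},
			Containers: []corev1.Container{
				{Name: "registry-server", Image: "placeholder/catalog", VolumeMounts: mounts},
			},
		},
	}
}

func main() { fmt.Println(catalogPod().Namespace + "/" + catalogPod().Name) }
```
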
event={"ID":"805639f3-263b-41b4-85ba-1fb9ced8eb46","Type":"ContainerStarted","Data":"7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2"} Dec 01 20:30:37 crc kubenswrapper[4852]: I1201 20:30:37.826807 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x5hpj" podStartSLOduration=3.390920359 podStartE2EDuration="6.826786432s" podCreationTimestamp="2025-12-01 20:30:31 +0000 UTC" firstStartedPulling="2025-12-01 20:30:33.757957483 +0000 UTC m=+1553.685038890" lastFinishedPulling="2025-12-01 20:30:37.193823546 +0000 UTC m=+1557.120904963" observedRunningTime="2025-12-01 20:30:37.820529226 +0000 UTC m=+1557.747610653" watchObservedRunningTime="2025-12-01 20:30:37.826786432 +0000 UTC m=+1557.753867849" Dec 01 20:30:42 crc kubenswrapper[4852]: I1201 20:30:42.225411 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:42 crc kubenswrapper[4852]: I1201 20:30:42.226048 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:42 crc kubenswrapper[4852]: I1201 20:30:42.280007 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:42 crc kubenswrapper[4852]: I1201 20:30:42.898290 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:42 crc kubenswrapper[4852]: I1201 20:30:42.949510 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x5hpj"] Dec 01 20:30:44 crc kubenswrapper[4852]: I1201 20:30:44.320186 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:30:44 crc kubenswrapper[4852]: E1201 20:30:44.320790 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:30:44 crc kubenswrapper[4852]: I1201 20:30:44.869162 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x5hpj" podUID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerName="registry-server" containerID="cri-o://7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2" gracePeriod=2 Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.460494 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.602907 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-utilities\") pod \"805639f3-263b-41b4-85ba-1fb9ced8eb46\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.603042 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcvnd\" (UniqueName: \"kubernetes.io/projected/805639f3-263b-41b4-85ba-1fb9ced8eb46-kube-api-access-hcvnd\") pod \"805639f3-263b-41b4-85ba-1fb9ced8eb46\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.603095 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-catalog-content\") pod \"805639f3-263b-41b4-85ba-1fb9ced8eb46\" (UID: \"805639f3-263b-41b4-85ba-1fb9ced8eb46\") " Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.604549 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-utilities" (OuterVolumeSpecName: "utilities") pod "805639f3-263b-41b4-85ba-1fb9ced8eb46" (UID: "805639f3-263b-41b4-85ba-1fb9ced8eb46"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.611403 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/805639f3-263b-41b4-85ba-1fb9ced8eb46-kube-api-access-hcvnd" (OuterVolumeSpecName: "kube-api-access-hcvnd") pod "805639f3-263b-41b4-85ba-1fb9ced8eb46" (UID: "805639f3-263b-41b4-85ba-1fb9ced8eb46"). InnerVolumeSpecName "kube-api-access-hcvnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.657435 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "805639f3-263b-41b4-85ba-1fb9ced8eb46" (UID: "805639f3-263b-41b4-85ba-1fb9ced8eb46"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.705870 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.705903 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcvnd\" (UniqueName: \"kubernetes.io/projected/805639f3-263b-41b4-85ba-1fb9ced8eb46-kube-api-access-hcvnd\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.705913 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/805639f3-263b-41b4-85ba-1fb9ced8eb46-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.881546 4852 generic.go:334] "Generic (PLEG): container finished" podID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerID="7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2" exitCode=0 Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.881592 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5hpj" event={"ID":"805639f3-263b-41b4-85ba-1fb9ced8eb46","Type":"ContainerDied","Data":"7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2"} Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.881869 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5hpj" event={"ID":"805639f3-263b-41b4-85ba-1fb9ced8eb46","Type":"ContainerDied","Data":"af06096e11a3dc8bf780a4e944551ccd749acbaa0bd7675a5984d5c26c7c51b1"} Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.881892 4852 scope.go:117] "RemoveContainer" containerID="7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.881632 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x5hpj" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.902001 4852 scope.go:117] "RemoveContainer" containerID="a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.918095 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x5hpj"] Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.927379 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x5hpj"] Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.946801 4852 scope.go:117] "RemoveContainer" containerID="670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.992424 4852 scope.go:117] "RemoveContainer" containerID="7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2" Dec 01 20:30:45 crc kubenswrapper[4852]: E1201 20:30:45.992835 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2\": container with ID starting with 7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2 not found: ID does not exist" containerID="7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.992866 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2"} err="failed to get container status \"7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2\": rpc error: code = NotFound desc = could not find container \"7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2\": container with ID starting with 7f19266bb14f2a8a38f6b52c255039f6a41d111170993d1ffcef8ecb3b1ed2a2 not found: ID does not exist" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.992887 4852 scope.go:117] "RemoveContainer" containerID="a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed" Dec 01 20:30:45 crc kubenswrapper[4852]: E1201 20:30:45.993207 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed\": container with ID starting with a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed not found: ID does not exist" containerID="a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.993267 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed"} err="failed to get container status \"a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed\": rpc error: code = NotFound desc = could not find container \"a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed\": container with ID starting with a09124052173e621f7dc1d3b71aa6ebe41a2c71e11ec31fc76910b59dab794ed not found: ID does not exist" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.993303 4852 scope.go:117] "RemoveContainer" containerID="670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa" Dec 01 20:30:45 crc kubenswrapper[4852]: E1201 20:30:45.993707 4852 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa\": container with ID starting with 670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa not found: ID does not exist" containerID="670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa" Dec 01 20:30:45 crc kubenswrapper[4852]: I1201 20:30:45.993748 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa"} err="failed to get container status \"670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa\": rpc error: code = NotFound desc = could not find container \"670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa\": container with ID starting with 670445a848e02504d985da1dffd911cea6ad02ccfab800600b8df55dff6be1aa not found: ID does not exist" Dec 01 20:30:46 crc kubenswrapper[4852]: I1201 20:30:46.332889 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="805639f3-263b-41b4-85ba-1fb9ced8eb46" path="/var/lib/kubelet/pods/805639f3-263b-41b4-85ba-1fb9ced8eb46/volumes" Dec 01 20:30:58 crc kubenswrapper[4852]: I1201 20:30:58.320871 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:30:58 crc kubenswrapper[4852]: E1201 20:30:58.321711 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:31:08 crc kubenswrapper[4852]: I1201 20:31:08.103803 4852 generic.go:334] "Generic (PLEG): container finished" podID="03d266a8-6787-4bc8-8836-d11fb0d078b4" containerID="33aba7c803cadb73ec22c694069664884d82f889dd19ba4c1fc5b463f4cf8eaf" exitCode=0 Dec 01 20:31:08 crc kubenswrapper[4852]: I1201 20:31:08.103894 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" event={"ID":"03d266a8-6787-4bc8-8836-d11fb0d078b4","Type":"ContainerDied","Data":"33aba7c803cadb73ec22c694069664884d82f889dd19ba4c1fc5b463f4cf8eaf"} Dec 01 20:31:09 crc kubenswrapper[4852]: I1201 20:31:09.731690 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:31:09 crc kubenswrapper[4852]: I1201 20:31:09.902917 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-bootstrap-combined-ca-bundle\") pod \"03d266a8-6787-4bc8-8836-d11fb0d078b4\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " Dec 01 20:31:09 crc kubenswrapper[4852]: I1201 20:31:09.903047 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d27pv\" (UniqueName: \"kubernetes.io/projected/03d266a8-6787-4bc8-8836-d11fb0d078b4-kube-api-access-d27pv\") pod \"03d266a8-6787-4bc8-8836-d11fb0d078b4\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " Dec 01 20:31:09 crc kubenswrapper[4852]: I1201 20:31:09.903121 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-inventory\") pod \"03d266a8-6787-4bc8-8836-d11fb0d078b4\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " Dec 01 20:31:09 crc kubenswrapper[4852]: I1201 20:31:09.903251 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-ssh-key\") pod \"03d266a8-6787-4bc8-8836-d11fb0d078b4\" (UID: \"03d266a8-6787-4bc8-8836-d11fb0d078b4\") " Dec 01 20:31:09 crc kubenswrapper[4852]: I1201 20:31:09.909646 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "03d266a8-6787-4bc8-8836-d11fb0d078b4" (UID: "03d266a8-6787-4bc8-8836-d11fb0d078b4"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:31:09 crc kubenswrapper[4852]: I1201 20:31:09.909708 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03d266a8-6787-4bc8-8836-d11fb0d078b4-kube-api-access-d27pv" (OuterVolumeSpecName: "kube-api-access-d27pv") pod "03d266a8-6787-4bc8-8836-d11fb0d078b4" (UID: "03d266a8-6787-4bc8-8836-d11fb0d078b4"). InnerVolumeSpecName "kube-api-access-d27pv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:31:09 crc kubenswrapper[4852]: I1201 20:31:09.931434 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-inventory" (OuterVolumeSpecName: "inventory") pod "03d266a8-6787-4bc8-8836-d11fb0d078b4" (UID: "03d266a8-6787-4bc8-8836-d11fb0d078b4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:31:09 crc kubenswrapper[4852]: I1201 20:31:09.936775 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "03d266a8-6787-4bc8-8836-d11fb0d078b4" (UID: "03d266a8-6787-4bc8-8836-d11fb0d078b4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.006533 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.006873 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.006891 4852 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03d266a8-6787-4bc8-8836-d11fb0d078b4-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.006911 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d27pv\" (UniqueName: \"kubernetes.io/projected/03d266a8-6787-4bc8-8836-d11fb0d078b4-kube-api-access-d27pv\") on node \"crc\" DevicePath \"\"" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.128225 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" event={"ID":"03d266a8-6787-4bc8-8836-d11fb0d078b4","Type":"ContainerDied","Data":"eaa194d0496ed45f825031ac0e002b4608f490edacd0455e30111a6c129880fb"} Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.128306 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eaa194d0496ed45f825031ac0e002b4608f490edacd0455e30111a6c129880fb" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.128359 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.235983 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls"] Dec 01 20:31:10 crc kubenswrapper[4852]: E1201 20:31:10.236487 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerName="registry-server" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.236508 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerName="registry-server" Dec 01 20:31:10 crc kubenswrapper[4852]: E1201 20:31:10.236527 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03d266a8-6787-4bc8-8836-d11fb0d078b4" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.236537 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="03d266a8-6787-4bc8-8836-d11fb0d078b4" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 20:31:10 crc kubenswrapper[4852]: E1201 20:31:10.236554 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerName="extract-content" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.236562 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerName="extract-content" Dec 01 20:31:10 crc kubenswrapper[4852]: E1201 20:31:10.236605 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerName="extract-utilities" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.236613 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerName="extract-utilities" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.236798 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="805639f3-263b-41b4-85ba-1fb9ced8eb46" containerName="registry-server" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.236827 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="03d266a8-6787-4bc8-8836-d11fb0d078b4" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.237481 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.239559 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.239756 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.239896 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.240075 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.259903 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls"] Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.313550 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm2pw\" (UniqueName: \"kubernetes.io/projected/f399c1ee-c0af-4085-953e-6333beb90786-kube-api-access-jm2pw\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7pls\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.313633 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7pls\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.313661 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7pls\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.415465 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7pls\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.415629 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm2pw\" (UniqueName: \"kubernetes.io/projected/f399c1ee-c0af-4085-953e-6333beb90786-kube-api-access-jm2pw\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7pls\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.415718 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-ssh-key\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-q7pls\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.421400 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7pls\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.422694 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7pls\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.433068 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm2pw\" (UniqueName: \"kubernetes.io/projected/f399c1ee-c0af-4085-953e-6333beb90786-kube-api-access-jm2pw\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7pls\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:10 crc kubenswrapper[4852]: I1201 20:31:10.608389 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:31:11 crc kubenswrapper[4852]: I1201 20:31:11.278872 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls"] Dec 01 20:31:11 crc kubenswrapper[4852]: I1201 20:31:11.320124 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:31:11 crc kubenswrapper[4852]: E1201 20:31:11.320831 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:31:12 crc kubenswrapper[4852]: I1201 20:31:12.157780 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" event={"ID":"f399c1ee-c0af-4085-953e-6333beb90786","Type":"ContainerStarted","Data":"b80625c8aefc2a7ef4ef2dc6758e45e9a51f2ecfbb79d0aac41c09f4e76c6ed1"} Dec 01 20:31:13 crc kubenswrapper[4852]: I1201 20:31:13.168124 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" event={"ID":"f399c1ee-c0af-4085-953e-6333beb90786","Type":"ContainerStarted","Data":"a48ac6bebaa78ad7c7d36fffcc80378e0e886b15faee34e5da12e2234e8c6fa2"} Dec 01 20:31:13 crc kubenswrapper[4852]: I1201 20:31:13.186718 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" podStartSLOduration=2.568183212 podStartE2EDuration="3.186693225s" podCreationTimestamp="2025-12-01 
20:31:10 +0000 UTC" firstStartedPulling="2025-12-01 20:31:11.287784884 +0000 UTC m=+1591.214866311" lastFinishedPulling="2025-12-01 20:31:11.906294877 +0000 UTC m=+1591.833376324" observedRunningTime="2025-12-01 20:31:13.18176199 +0000 UTC m=+1593.108843407" watchObservedRunningTime="2025-12-01 20:31:13.186693225 +0000 UTC m=+1593.113774642" Dec 01 20:31:24 crc kubenswrapper[4852]: I1201 20:31:24.320869 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:31:24 crc kubenswrapper[4852]: E1201 20:31:24.321373 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.062234 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-tj7jd"] Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.073813 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-1ce5-account-create-update-4nwzp"] Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.083945 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-f1cb-account-create-update-6rb5m"] Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.093753 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-7wvf6"] Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.102235 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-f1cb-account-create-update-6rb5m"] Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.110097 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-1ce5-account-create-update-4nwzp"] Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.118148 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-7wvf6"] Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.126858 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-tj7jd"] Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.332769 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04e3b15e-4dde-441e-b414-07ab83f3e23b" path="/var/lib/kubelet/pods/04e3b15e-4dde-441e-b414-07ab83f3e23b/volumes" Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.333530 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d51ca87-b472-4aba-b566-af66d1c18028" path="/var/lib/kubelet/pods/0d51ca87-b472-4aba-b566-af66d1c18028/volumes" Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.334198 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e8835f4-45e5-441c-8039-29193087baca" path="/var/lib/kubelet/pods/3e8835f4-45e5-441c-8039-29193087baca/volumes" Dec 01 20:31:34 crc kubenswrapper[4852]: I1201 20:31:34.334869 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6de166a4-30e1-41db-81c9-024c530a1da3" path="/var/lib/kubelet/pods/6de166a4-30e1-41db-81c9-024c530a1da3/volumes" Dec 01 20:31:39 crc kubenswrapper[4852]: I1201 20:31:39.319967 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 
20:31:39 crc kubenswrapper[4852]: E1201 20:31:39.320627 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:31:40 crc kubenswrapper[4852]: I1201 20:31:40.079220 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-6sc57"] Dec 01 20:31:40 crc kubenswrapper[4852]: I1201 20:31:40.094920 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-6sc57"] Dec 01 20:31:40 crc kubenswrapper[4852]: I1201 20:31:40.107190 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-82f7-account-create-update-qbdms"] Dec 01 20:31:40 crc kubenswrapper[4852]: I1201 20:31:40.118565 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-82f7-account-create-update-qbdms"] Dec 01 20:31:40 crc kubenswrapper[4852]: I1201 20:31:40.342914 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b974096d-536c-40f0-962f-a8407408b4c6" path="/var/lib/kubelet/pods/b974096d-536c-40f0-962f-a8407408b4c6/volumes" Dec 01 20:31:40 crc kubenswrapper[4852]: I1201 20:31:40.343666 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8da5478-7e0f-4af2-bb80-213f45977feb" path="/var/lib/kubelet/pods/f8da5478-7e0f-4af2-bb80-213f45977feb/volumes" Dec 01 20:31:50 crc kubenswrapper[4852]: I1201 20:31:50.329046 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:31:50 crc kubenswrapper[4852]: E1201 20:31:50.329924 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:31:55 crc kubenswrapper[4852]: I1201 20:31:55.426387 4852 scope.go:117] "RemoveContainer" containerID="104e5ea29caee153b5ee3ffe1d28569b952bf663a2c9b8e7ef6130abab92a43f" Dec 01 20:31:55 crc kubenswrapper[4852]: I1201 20:31:55.453535 4852 scope.go:117] "RemoveContainer" containerID="4ef873b4b7747ccc26f786bc85441d4a38cdf09f00ec5e6b5179dd3cb3da5347" Dec 01 20:31:55 crc kubenswrapper[4852]: I1201 20:31:55.507277 4852 scope.go:117] "RemoveContainer" containerID="0b6a57b185a79d4a759e2f3b397ebdc56624d2b6dcf863e8134f0416157edf9c" Dec 01 20:31:55 crc kubenswrapper[4852]: I1201 20:31:55.555084 4852 scope.go:117] "RemoveContainer" containerID="0431e02fb55423d14e8915bef5785b3d924525d06e5747a6ab37d5f05bd33a6f" Dec 01 20:31:55 crc kubenswrapper[4852]: I1201 20:31:55.623911 4852 scope.go:117] "RemoveContainer" containerID="bda90d6ef5d7836105083c869def2442f47d8c99617ddf355fa1dd5a2bb95e11" Dec 01 20:31:55 crc kubenswrapper[4852]: I1201 20:31:55.671965 4852 scope.go:117] "RemoveContainer" containerID="7b4c887a6a74ad33ca4fd69edcdc0d3f0d9358019f7657def2578ad9eb469981" Dec 01 20:32:04 crc kubenswrapper[4852]: I1201 20:32:04.320498 4852 scope.go:117] "RemoveContainer" 
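
[annotation] The burst of bare "RemoveContainer" records at 20:31:55 above (and again at 20:32:55 below), with no pod context attached, is consistent with the kubelet's periodic container garbage collection, which keeps only the newest dead containers per pod/container pair and deletes the rest. A hedged sketch of that policy; the keep-count mirrors the --maximum-dead-containers-per-container knob, and the types are illustrative.

```go
package main

import (
	"fmt"
	"sort"
)

type deadContainer struct {
	id       string
	finished int64 // unix seconds at which the container exited
}

// gcDeadContainers keeps the newest `keep` entries and removes the rest.
func gcDeadContainers(dead []deadContainer, keep int, remove func(id string)) {
	sort.Slice(dead, func(i, j int) bool { return dead[i].finished > dead[j].finished })
	if keep > len(dead) {
		keep = len(dead)
	}
	for _, c := range dead[keep:] {
		remove(c.id)
	}
}

func main() {
	dead := []deadContainer{
		{"104e5ea29cae…", 100}, {"4ef873b4b774…", 200}, {"0b6a57b185a7…", 300},
	}
	gcDeadContainers(dead, 1, func(id string) { fmt.Println("RemoveContainer", id) })
}
```
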
containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:32:04 crc kubenswrapper[4852]: E1201 20:32:04.321376 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:32:07 crc kubenswrapper[4852]: I1201 20:32:07.042879 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-rl4kg"] Dec 01 20:32:07 crc kubenswrapper[4852]: I1201 20:32:07.051608 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-rl4kg"] Dec 01 20:32:08 crc kubenswrapper[4852]: I1201 20:32:08.334859 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c546551-9ac1-4462-a439-3a63b69d678d" path="/var/lib/kubelet/pods/5c546551-9ac1-4462-a439-3a63b69d678d/volumes" Dec 01 20:32:13 crc kubenswrapper[4852]: I1201 20:32:13.053713 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-qs9h7"] Dec 01 20:32:13 crc kubenswrapper[4852]: I1201 20:32:13.078609 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-8x2jx"] Dec 01 20:32:13 crc kubenswrapper[4852]: I1201 20:32:13.095655 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-8x2jx"] Dec 01 20:32:13 crc kubenswrapper[4852]: I1201 20:32:13.118469 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-qs9h7"] Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.035870 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-89ab-account-create-update-jpkjq"] Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.050363 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-rrvx5"] Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.062983 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-0a48-account-create-update-r5sp2"] Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.074791 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-1aea-account-create-update-5cmc8"] Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.082027 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-89ab-account-create-update-jpkjq"] Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.089345 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-1aea-account-create-update-5cmc8"] Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.096239 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-0a48-account-create-update-r5sp2"] Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.102948 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-rrvx5"] Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.334442 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41f82f81-1abd-4196-9018-aec00a3d1b99" path="/var/lib/kubelet/pods/41f82f81-1abd-4196-9018-aec00a3d1b99/volumes" Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.335279 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a4c08895-1b79-4672-ab94-f7d53d17616a" path="/var/lib/kubelet/pods/a4c08895-1b79-4672-ab94-f7d53d17616a/volumes" Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.335840 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa141992-beaa-4f60-993a-1aa6b520aff5" path="/var/lib/kubelet/pods/aa141992-beaa-4f60-993a-1aa6b520aff5/volumes" Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.336397 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcc105dd-813a-4de9-a0a5-b76287276b3b" path="/var/lib/kubelet/pods/dcc105dd-813a-4de9-a0a5-b76287276b3b/volumes" Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.337390 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecb2597e-20f0-472e-9a8e-100301abb115" path="/var/lib/kubelet/pods/ecb2597e-20f0-472e-9a8e-100301abb115/volumes" Dec 01 20:32:14 crc kubenswrapper[4852]: I1201 20:32:14.337930 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f50c4872-2332-4f7d-9df1-99b9b1b639a6" path="/var/lib/kubelet/pods/f50c4872-2332-4f7d-9df1-99b9b1b639a6/volumes" Dec 01 20:32:17 crc kubenswrapper[4852]: I1201 20:32:17.320992 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:32:17 crc kubenswrapper[4852]: E1201 20:32:17.322516 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:32:23 crc kubenswrapper[4852]: I1201 20:32:23.067040 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-mx887"] Dec 01 20:32:23 crc kubenswrapper[4852]: I1201 20:32:23.077065 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-mx887"] Dec 01 20:32:24 crc kubenswrapper[4852]: I1201 20:32:24.340449 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31c7a083-b879-47d4-ad43-67019e4a9ecf" path="/var/lib/kubelet/pods/31c7a083-b879-47d4-ad43-67019e4a9ecf/volumes" Dec 01 20:32:29 crc kubenswrapper[4852]: I1201 20:32:29.320275 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:32:29 crc kubenswrapper[4852]: E1201 20:32:29.320954 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:32:44 crc kubenswrapper[4852]: I1201 20:32:44.320085 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:32:44 crc kubenswrapper[4852]: E1201 20:32:44.320959 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:32:54 crc kubenswrapper[4852]: I1201 20:32:54.062690 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-k8mp8"] Dec 01 20:32:54 crc kubenswrapper[4852]: I1201 20:32:54.077961 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-k8mp8"] Dec 01 20:32:54 crc kubenswrapper[4852]: I1201 20:32:54.333170 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ea8042f-95a3-43d9-a653-6c61fc239d8e" path="/var/lib/kubelet/pods/2ea8042f-95a3-43d9-a653-6c61fc239d8e/volumes" Dec 01 20:32:55 crc kubenswrapper[4852]: I1201 20:32:55.821303 4852 scope.go:117] "RemoveContainer" containerID="5627e0a19f80214530e2e4a292a9a7a1350552543765423ed882cd2dcc60aa57" Dec 01 20:32:55 crc kubenswrapper[4852]: I1201 20:32:55.862996 4852 scope.go:117] "RemoveContainer" containerID="ee9613a8690b0e24a9b003b71f5d8f85710cd088eee11004c2292ae6cf3e55cf" Dec 01 20:32:55 crc kubenswrapper[4852]: I1201 20:32:55.935740 4852 scope.go:117] "RemoveContainer" containerID="f9cc7e6eda9b98f182d9b297ad94b79a69bfa3c22c867e9ec1be9592e4067097" Dec 01 20:32:55 crc kubenswrapper[4852]: I1201 20:32:55.978801 4852 scope.go:117] "RemoveContainer" containerID="a59c2abd917cdc0571dc449618c923c4c354ac5e03ae5d0d1080f93684d9de74" Dec 01 20:32:56 crc kubenswrapper[4852]: I1201 20:32:56.044593 4852 scope.go:117] "RemoveContainer" containerID="a5171b95ee3ba14699831acd900e1e8b4fe71e02aefcb21cc2391df28eb08710" Dec 01 20:32:56 crc kubenswrapper[4852]: I1201 20:32:56.066858 4852 scope.go:117] "RemoveContainer" containerID="ab3f717af585ef948d30990430349a3a5e35ef50b8591d8ef133c6ef4b2ac57c" Dec 01 20:32:56 crc kubenswrapper[4852]: I1201 20:32:56.114616 4852 scope.go:117] "RemoveContainer" containerID="6d620a02369483af0fc509c6a7b21e753cd840b93a7997a569ee818908b3c5c2" Dec 01 20:32:56 crc kubenswrapper[4852]: I1201 20:32:56.132850 4852 scope.go:117] "RemoveContainer" containerID="02eafd83064cd0c2af3487eadab0e5997ec94c218567f5bd0a9aacf742161f69" Dec 01 20:32:56 crc kubenswrapper[4852]: I1201 20:32:56.169769 4852 scope.go:117] "RemoveContainer" containerID="27bfeb0b0613393041ce9d634938987e72a639bc472fe920b0746bea0d21bc9a" Dec 01 20:32:57 crc kubenswrapper[4852]: I1201 20:32:57.319797 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:32:57 crc kubenswrapper[4852]: E1201 20:32:57.320320 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:33:02 crc kubenswrapper[4852]: I1201 20:33:02.482162 4852 generic.go:334] "Generic (PLEG): container finished" podID="f399c1ee-c0af-4085-953e-6333beb90786" containerID="a48ac6bebaa78ad7c7d36fffcc80378e0e886b15faee34e5da12e2234e8c6fa2" exitCode=0 Dec 01 20:33:02 crc kubenswrapper[4852]: I1201 20:33:02.482279 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" 
event={"ID":"f399c1ee-c0af-4085-953e-6333beb90786","Type":"ContainerDied","Data":"a48ac6bebaa78ad7c7d36fffcc80378e0e886b15faee34e5da12e2234e8c6fa2"} Dec 01 20:33:03 crc kubenswrapper[4852]: I1201 20:33:03.979724 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.016815 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-inventory\") pod \"f399c1ee-c0af-4085-953e-6333beb90786\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.016937 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-ssh-key\") pod \"f399c1ee-c0af-4085-953e-6333beb90786\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.017001 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm2pw\" (UniqueName: \"kubernetes.io/projected/f399c1ee-c0af-4085-953e-6333beb90786-kube-api-access-jm2pw\") pod \"f399c1ee-c0af-4085-953e-6333beb90786\" (UID: \"f399c1ee-c0af-4085-953e-6333beb90786\") " Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.038776 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f399c1ee-c0af-4085-953e-6333beb90786-kube-api-access-jm2pw" (OuterVolumeSpecName: "kube-api-access-jm2pw") pod "f399c1ee-c0af-4085-953e-6333beb90786" (UID: "f399c1ee-c0af-4085-953e-6333beb90786"). InnerVolumeSpecName "kube-api-access-jm2pw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.069431 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f399c1ee-c0af-4085-953e-6333beb90786" (UID: "f399c1ee-c0af-4085-953e-6333beb90786"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.069809 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-inventory" (OuterVolumeSpecName: "inventory") pod "f399c1ee-c0af-4085-953e-6333beb90786" (UID: "f399c1ee-c0af-4085-953e-6333beb90786"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.119727 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.119763 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f399c1ee-c0af-4085-953e-6333beb90786-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.119774 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm2pw\" (UniqueName: \"kubernetes.io/projected/f399c1ee-c0af-4085-953e-6333beb90786-kube-api-access-jm2pw\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.509431 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" event={"ID":"f399c1ee-c0af-4085-953e-6333beb90786","Type":"ContainerDied","Data":"b80625c8aefc2a7ef4ef2dc6758e45e9a51f2ecfbb79d0aac41c09f4e76c6ed1"} Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.509531 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b80625c8aefc2a7ef4ef2dc6758e45e9a51f2ecfbb79d0aac41c09f4e76c6ed1" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.509528 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7pls" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.613492 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c"] Dec 01 20:33:04 crc kubenswrapper[4852]: E1201 20:33:04.613959 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f399c1ee-c0af-4085-953e-6333beb90786" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.613984 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f399c1ee-c0af-4085-953e-6333beb90786" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.614225 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f399c1ee-c0af-4085-953e-6333beb90786" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.615040 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.617272 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.617604 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.617977 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.618088 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.623491 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c"] Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.733401 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.734091 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.734167 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk87c\" (UniqueName: \"kubernetes.io/projected/d7fb0098-9a59-4686-a483-8a1361628214-kube-api-access-rk87c\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.836062 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.836402 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.836566 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk87c\" (UniqueName: \"kubernetes.io/projected/d7fb0098-9a59-4686-a483-8a1361628214-kube-api-access-rk87c\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.840830 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.841054 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.854254 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk87c\" (UniqueName: \"kubernetes.io/projected/d7fb0098-9a59-4686-a483-8a1361628214-kube-api-access-rk87c\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:04 crc kubenswrapper[4852]: I1201 20:33:04.942566 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:33:05 crc kubenswrapper[4852]: I1201 20:33:05.045826 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-9mp8s"] Dec 01 20:33:05 crc kubenswrapper[4852]: I1201 20:33:05.059829 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-fh7zl"] Dec 01 20:33:05 crc kubenswrapper[4852]: I1201 20:33:05.082739 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-6lqj8"] Dec 01 20:33:05 crc kubenswrapper[4852]: I1201 20:33:05.096301 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-9mp8s"] Dec 01 20:33:05 crc kubenswrapper[4852]: I1201 20:33:05.105437 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-fh7zl"] Dec 01 20:33:05 crc kubenswrapper[4852]: I1201 20:33:05.114903 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-6lqj8"] Dec 01 20:33:05 crc kubenswrapper[4852]: I1201 20:33:05.628975 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c"] Dec 01 20:33:05 crc kubenswrapper[4852]: I1201 20:33:05.636521 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:33:06 crc kubenswrapper[4852]: I1201 20:33:06.337116 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54be94f0-c30e-4a21-9a22-a055b5e6154f" path="/var/lib/kubelet/pods/54be94f0-c30e-4a21-9a22-a055b5e6154f/volumes" Dec 01 20:33:06 crc kubenswrapper[4852]: I1201 20:33:06.338280 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="622dd636-8b05-4a9c-aa97-0fb5fd5d0c58" path="/var/lib/kubelet/pods/622dd636-8b05-4a9c-aa97-0fb5fd5d0c58/volumes" Dec 01 20:33:06 crc 
kubenswrapper[4852]: I1201 20:33:06.339533 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7feb09eb-34d4-4b20-9904-bcde9ec4a9db" path="/var/lib/kubelet/pods/7feb09eb-34d4-4b20-9904-bcde9ec4a9db/volumes" Dec 01 20:33:06 crc kubenswrapper[4852]: I1201 20:33:06.529433 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" event={"ID":"d7fb0098-9a59-4686-a483-8a1361628214","Type":"ContainerStarted","Data":"f997a18b37e9ac5b2b4def5fcec1371038e8a3a884f061ac6b73e9900279c7e2"} Dec 01 20:33:07 crc kubenswrapper[4852]: I1201 20:33:07.541899 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" event={"ID":"d7fb0098-9a59-4686-a483-8a1361628214","Type":"ContainerStarted","Data":"d2b859f88250a309e80e56a81511a65c42c2e1dd96f0319372ac562df1017001"} Dec 01 20:33:07 crc kubenswrapper[4852]: I1201 20:33:07.572445 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" podStartSLOduration=2.849704472 podStartE2EDuration="3.572425961s" podCreationTimestamp="2025-12-01 20:33:04 +0000 UTC" firstStartedPulling="2025-12-01 20:33:05.636231372 +0000 UTC m=+1705.563312789" lastFinishedPulling="2025-12-01 20:33:06.358952831 +0000 UTC m=+1706.286034278" observedRunningTime="2025-12-01 20:33:07.56951446 +0000 UTC m=+1707.496595877" watchObservedRunningTime="2025-12-01 20:33:07.572425961 +0000 UTC m=+1707.499507368" Dec 01 20:33:12 crc kubenswrapper[4852]: I1201 20:33:12.320643 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:33:12 crc kubenswrapper[4852]: E1201 20:33:12.321394 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:33:19 crc kubenswrapper[4852]: I1201 20:33:19.073324 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-ml69l"] Dec 01 20:33:19 crc kubenswrapper[4852]: I1201 20:33:19.086650 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-ml69l"] Dec 01 20:33:20 crc kubenswrapper[4852]: I1201 20:33:20.347412 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b531141a-eca1-4f9f-a67a-68d48d92add9" path="/var/lib/kubelet/pods/b531141a-eca1-4f9f-a67a-68d48d92add9/volumes" Dec 01 20:33:25 crc kubenswrapper[4852]: I1201 20:33:25.322590 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:33:25 crc kubenswrapper[4852]: E1201 20:33:25.323378 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:33:39 crc kubenswrapper[4852]: I1201 20:33:39.322980 4852 
scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:33:39 crc kubenswrapper[4852]: E1201 20:33:39.324045 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:33:52 crc kubenswrapper[4852]: I1201 20:33:52.320627 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:33:52 crc kubenswrapper[4852]: E1201 20:33:52.321239 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:33:56 crc kubenswrapper[4852]: I1201 20:33:56.371136 4852 scope.go:117] "RemoveContainer" containerID="0832d29e188960d4ec25f7544e39bea226190242c52c6d164b84ef4bf9af1895" Dec 01 20:33:56 crc kubenswrapper[4852]: I1201 20:33:56.435791 4852 scope.go:117] "RemoveContainer" containerID="dfe745f60839d0ae1dbae3babaae2d3a9388616ba7ba747d914d899ca4204850" Dec 01 20:33:56 crc kubenswrapper[4852]: I1201 20:33:56.487684 4852 scope.go:117] "RemoveContainer" containerID="614f6ee7e96a76d16418a17bed3b88c340405bc2f9c9064b22252542bce3a355" Dec 01 20:33:56 crc kubenswrapper[4852]: I1201 20:33:56.546856 4852 scope.go:117] "RemoveContainer" containerID="c95cc92f4d0bf963052b855977bad0e2abdf2a157fccf7ed9732875bfc4d099b" Dec 01 20:34:01 crc kubenswrapper[4852]: I1201 20:34:01.048419 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-f3b0-account-create-update-vzdnp"] Dec 01 20:34:01 crc kubenswrapper[4852]: I1201 20:34:01.061868 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-gwfcc"] Dec 01 20:34:01 crc kubenswrapper[4852]: I1201 20:34:01.072305 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-qq8bt"] Dec 01 20:34:01 crc kubenswrapper[4852]: I1201 20:34:01.081104 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-f3b0-account-create-update-vzdnp"] Dec 01 20:34:01 crc kubenswrapper[4852]: I1201 20:34:01.087723 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-qq8bt"] Dec 01 20:34:01 crc kubenswrapper[4852]: I1201 20:34:01.094083 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-gwfcc"] Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.056144 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-g4rk6"] Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.071256 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-ace9-account-create-update-t4g7j"] Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.081665 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-63da-account-create-update-2dq9v"] Dec 01 20:34:02 crc kubenswrapper[4852]: 
I1201 20:34:02.091234 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-g4rk6"] Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.098358 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-ace9-account-create-update-t4g7j"] Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.105377 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-63da-account-create-update-2dq9v"] Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.334160 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2757bd33-854b-4876-965d-d77359752edb" path="/var/lib/kubelet/pods/2757bd33-854b-4876-965d-d77359752edb/volumes" Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.335271 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31688a6c-099f-466e-b08b-99b67777aadd" path="/var/lib/kubelet/pods/31688a6c-099f-466e-b08b-99b67777aadd/volumes" Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.336421 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477f2357-5a45-4725-b0a1-77f6b5a8eeb3" path="/var/lib/kubelet/pods/477f2357-5a45-4725-b0a1-77f6b5a8eeb3/volumes" Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.337721 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6546f5f4-5cb1-4cb4-a403-1fc050cd9efb" path="/var/lib/kubelet/pods/6546f5f4-5cb1-4cb4-a403-1fc050cd9efb/volumes" Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.339440 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70ae0323-a451-41a9-b76e-899430114d6c" path="/var/lib/kubelet/pods/70ae0323-a451-41a9-b76e-899430114d6c/volumes" Dec 01 20:34:02 crc kubenswrapper[4852]: I1201 20:34:02.340010 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0c09ee4-565f-4b6e-b17f-78defa53bde3" path="/var/lib/kubelet/pods/b0c09ee4-565f-4b6e-b17f-78defa53bde3/volumes" Dec 01 20:34:07 crc kubenswrapper[4852]: I1201 20:34:07.321049 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:34:07 crc kubenswrapper[4852]: E1201 20:34:07.322141 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:34:18 crc kubenswrapper[4852]: I1201 20:34:18.321255 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:34:18 crc kubenswrapper[4852]: E1201 20:34:18.322419 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:34:22 crc kubenswrapper[4852]: I1201 20:34:22.382026 4852 generic.go:334] "Generic (PLEG): container finished" podID="d7fb0098-9a59-4686-a483-8a1361628214" 
containerID="d2b859f88250a309e80e56a81511a65c42c2e1dd96f0319372ac562df1017001" exitCode=0 Dec 01 20:34:22 crc kubenswrapper[4852]: I1201 20:34:22.382164 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" event={"ID":"d7fb0098-9a59-4686-a483-8a1361628214","Type":"ContainerDied","Data":"d2b859f88250a309e80e56a81511a65c42c2e1dd96f0319372ac562df1017001"} Dec 01 20:34:23 crc kubenswrapper[4852]: I1201 20:34:23.825384 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:34:23 crc kubenswrapper[4852]: I1201 20:34:23.991867 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-ssh-key\") pod \"d7fb0098-9a59-4686-a483-8a1361628214\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " Dec 01 20:34:23 crc kubenswrapper[4852]: I1201 20:34:23.992106 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-inventory\") pod \"d7fb0098-9a59-4686-a483-8a1361628214\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " Dec 01 20:34:23 crc kubenswrapper[4852]: I1201 20:34:23.992231 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rk87c\" (UniqueName: \"kubernetes.io/projected/d7fb0098-9a59-4686-a483-8a1361628214-kube-api-access-rk87c\") pod \"d7fb0098-9a59-4686-a483-8a1361628214\" (UID: \"d7fb0098-9a59-4686-a483-8a1361628214\") " Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.000061 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7fb0098-9a59-4686-a483-8a1361628214-kube-api-access-rk87c" (OuterVolumeSpecName: "kube-api-access-rk87c") pod "d7fb0098-9a59-4686-a483-8a1361628214" (UID: "d7fb0098-9a59-4686-a483-8a1361628214"). InnerVolumeSpecName "kube-api-access-rk87c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.035726 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d7fb0098-9a59-4686-a483-8a1361628214" (UID: "d7fb0098-9a59-4686-a483-8a1361628214"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.043183 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-inventory" (OuterVolumeSpecName: "inventory") pod "d7fb0098-9a59-4686-a483-8a1361628214" (UID: "d7fb0098-9a59-4686-a483-8a1361628214"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.095553 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.095822 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7fb0098-9a59-4686-a483-8a1361628214-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.095954 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rk87c\" (UniqueName: \"kubernetes.io/projected/d7fb0098-9a59-4686-a483-8a1361628214-kube-api-access-rk87c\") on node \"crc\" DevicePath \"\"" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.401722 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" event={"ID":"d7fb0098-9a59-4686-a483-8a1361628214","Type":"ContainerDied","Data":"f997a18b37e9ac5b2b4def5fcec1371038e8a3a884f061ac6b73e9900279c7e2"} Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.401776 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f997a18b37e9ac5b2b4def5fcec1371038e8a3a884f061ac6b73e9900279c7e2" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.401839 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.493880 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm"] Dec 01 20:34:24 crc kubenswrapper[4852]: E1201 20:34:24.494286 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7fb0098-9a59-4686-a483-8a1361628214" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.494311 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7fb0098-9a59-4686-a483-8a1361628214" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.494575 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7fb0098-9a59-4686-a483-8a1361628214" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.495228 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.498645 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.498832 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.499069 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.499357 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.503894 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm"] Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.604737 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzn5q\" (UniqueName: \"kubernetes.io/projected/991ddb8f-bb11-4661-9604-2663fc221fc8-kube-api-access-zzn5q\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.604796 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.604818 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.707060 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzn5q\" (UniqueName: \"kubernetes.io/projected/991ddb8f-bb11-4661-9604-2663fc221fc8-kube-api-access-zzn5q\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.707106 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.707124 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-ssh-key\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.710583 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.711023 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.723143 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzn5q\" (UniqueName: \"kubernetes.io/projected/991ddb8f-bb11-4661-9604-2663fc221fc8-kube-api-access-zzn5q\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:24 crc kubenswrapper[4852]: I1201 20:34:24.812972 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:25 crc kubenswrapper[4852]: I1201 20:34:25.419998 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm"] Dec 01 20:34:25 crc kubenswrapper[4852]: W1201 20:34:25.427010 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod991ddb8f_bb11_4661_9604_2663fc221fc8.slice/crio-82709e29980eb1a7c8c4bc32ea8d2dffa560e51485a21700e8f45d2a1c37c788 WatchSource:0}: Error finding container 82709e29980eb1a7c8c4bc32ea8d2dffa560e51485a21700e8f45d2a1c37c788: Status 404 returned error can't find the container with id 82709e29980eb1a7c8c4bc32ea8d2dffa560e51485a21700e8f45d2a1c37c788 Dec 01 20:34:26 crc kubenswrapper[4852]: I1201 20:34:26.438614 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" event={"ID":"991ddb8f-bb11-4661-9604-2663fc221fc8","Type":"ContainerStarted","Data":"ec650ef24de7aec518322bc9c935ad83322be28f4d9292e1118c4cd424741bed"} Dec 01 20:34:26 crc kubenswrapper[4852]: I1201 20:34:26.438945 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" event={"ID":"991ddb8f-bb11-4661-9604-2663fc221fc8","Type":"ContainerStarted","Data":"82709e29980eb1a7c8c4bc32ea8d2dffa560e51485a21700e8f45d2a1c37c788"} Dec 01 20:34:26 crc kubenswrapper[4852]: I1201 20:34:26.463313 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" podStartSLOduration=1.953018078 podStartE2EDuration="2.463295801s" podCreationTimestamp="2025-12-01 20:34:24 +0000 UTC" firstStartedPulling="2025-12-01 20:34:25.429654817 +0000 UTC m=+1785.356736234" 
lastFinishedPulling="2025-12-01 20:34:25.93993253 +0000 UTC m=+1785.867013957" observedRunningTime="2025-12-01 20:34:26.455149107 +0000 UTC m=+1786.382230524" watchObservedRunningTime="2025-12-01 20:34:26.463295801 +0000 UTC m=+1786.390377218" Dec 01 20:34:30 crc kubenswrapper[4852]: I1201 20:34:30.066962 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lvtxh"] Dec 01 20:34:30 crc kubenswrapper[4852]: I1201 20:34:30.079282 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lvtxh"] Dec 01 20:34:30 crc kubenswrapper[4852]: I1201 20:34:30.344579 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="049d7d92-2f9a-4d85-af6d-46a56c4f4072" path="/var/lib/kubelet/pods/049d7d92-2f9a-4d85-af6d-46a56c4f4072/volumes" Dec 01 20:34:31 crc kubenswrapper[4852]: I1201 20:34:31.500518 4852 generic.go:334] "Generic (PLEG): container finished" podID="991ddb8f-bb11-4661-9604-2663fc221fc8" containerID="ec650ef24de7aec518322bc9c935ad83322be28f4d9292e1118c4cd424741bed" exitCode=0 Dec 01 20:34:31 crc kubenswrapper[4852]: I1201 20:34:31.500619 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" event={"ID":"991ddb8f-bb11-4661-9604-2663fc221fc8","Type":"ContainerDied","Data":"ec650ef24de7aec518322bc9c935ad83322be28f4d9292e1118c4cd424741bed"} Dec 01 20:34:32 crc kubenswrapper[4852]: I1201 20:34:32.320119 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:34:32 crc kubenswrapper[4852]: E1201 20:34:32.320386 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:34:32 crc kubenswrapper[4852]: I1201 20:34:32.956606 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.100304 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzn5q\" (UniqueName: \"kubernetes.io/projected/991ddb8f-bb11-4661-9604-2663fc221fc8-kube-api-access-zzn5q\") pod \"991ddb8f-bb11-4661-9604-2663fc221fc8\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.100568 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-inventory\") pod \"991ddb8f-bb11-4661-9604-2663fc221fc8\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.101869 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-ssh-key\") pod \"991ddb8f-bb11-4661-9604-2663fc221fc8\" (UID: \"991ddb8f-bb11-4661-9604-2663fc221fc8\") " Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.112096 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/991ddb8f-bb11-4661-9604-2663fc221fc8-kube-api-access-zzn5q" (OuterVolumeSpecName: "kube-api-access-zzn5q") pod "991ddb8f-bb11-4661-9604-2663fc221fc8" (UID: "991ddb8f-bb11-4661-9604-2663fc221fc8"). InnerVolumeSpecName "kube-api-access-zzn5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.144555 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-inventory" (OuterVolumeSpecName: "inventory") pod "991ddb8f-bb11-4661-9604-2663fc221fc8" (UID: "991ddb8f-bb11-4661-9604-2663fc221fc8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.146355 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "991ddb8f-bb11-4661-9604-2663fc221fc8" (UID: "991ddb8f-bb11-4661-9604-2663fc221fc8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.205139 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzn5q\" (UniqueName: \"kubernetes.io/projected/991ddb8f-bb11-4661-9604-2663fc221fc8-kube-api-access-zzn5q\") on node \"crc\" DevicePath \"\"" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.205197 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.205211 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/991ddb8f-bb11-4661-9604-2663fc221fc8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.523969 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" event={"ID":"991ddb8f-bb11-4661-9604-2663fc221fc8","Type":"ContainerDied","Data":"82709e29980eb1a7c8c4bc32ea8d2dffa560e51485a21700e8f45d2a1c37c788"} Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.524413 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82709e29980eb1a7c8c4bc32ea8d2dffa560e51485a21700e8f45d2a1c37c788" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.524057 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.614830 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6"] Dec 01 20:34:33 crc kubenswrapper[4852]: E1201 20:34:33.615313 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="991ddb8f-bb11-4661-9604-2663fc221fc8" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.615330 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="991ddb8f-bb11-4661-9604-2663fc221fc8" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.615601 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="991ddb8f-bb11-4661-9604-2663fc221fc8" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.616586 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.627008 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.627136 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.628720 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.629107 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.631049 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6"] Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.715192 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5vkg\" (UniqueName: \"kubernetes.io/projected/4055e3d3-767a-4a20-95e4-dda1685cbe61-kube-api-access-n5vkg\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pn2q6\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.715280 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pn2q6\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.715541 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pn2q6\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.817388 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pn2q6\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.817761 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pn2q6\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.817967 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5vkg\" (UniqueName: \"kubernetes.io/projected/4055e3d3-767a-4a20-95e4-dda1685cbe61-kube-api-access-n5vkg\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pn2q6\" (UID: 
\"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.827375 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pn2q6\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.828056 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pn2q6\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.843038 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5vkg\" (UniqueName: \"kubernetes.io/projected/4055e3d3-767a-4a20-95e4-dda1685cbe61-kube-api-access-n5vkg\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pn2q6\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:33 crc kubenswrapper[4852]: I1201 20:34:33.940181 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:34:34 crc kubenswrapper[4852]: I1201 20:34:34.530868 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6"] Dec 01 20:34:35 crc kubenswrapper[4852]: I1201 20:34:35.582712 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" event={"ID":"4055e3d3-767a-4a20-95e4-dda1685cbe61","Type":"ContainerStarted","Data":"38489e187593e1ffa2e165b167a99e38680f00a2d8f24e16d2a8b0ab671d2fcd"} Dec 01 20:34:36 crc kubenswrapper[4852]: I1201 20:34:36.592537 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" event={"ID":"4055e3d3-767a-4a20-95e4-dda1685cbe61","Type":"ContainerStarted","Data":"8bbda3365687e64de096dd5133dd2f2e2e84940a3fafb9302be2af65785b52db"} Dec 01 20:34:36 crc kubenswrapper[4852]: I1201 20:34:36.619983 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" podStartSLOduration=2.6363815539999997 podStartE2EDuration="3.619963802s" podCreationTimestamp="2025-12-01 20:34:33 +0000 UTC" firstStartedPulling="2025-12-01 20:34:34.531819541 +0000 UTC m=+1794.458900958" lastFinishedPulling="2025-12-01 20:34:35.515401779 +0000 UTC m=+1795.442483206" observedRunningTime="2025-12-01 20:34:36.611106885 +0000 UTC m=+1796.538188302" watchObservedRunningTime="2025-12-01 20:34:36.619963802 +0000 UTC m=+1796.547045219" Dec 01 20:34:43 crc kubenswrapper[4852]: I1201 20:34:43.321996 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:34:43 crc kubenswrapper[4852]: E1201 20:34:43.325175 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:34:53 crc kubenswrapper[4852]: I1201 20:34:53.044615 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-cgf5k"] Dec 01 20:34:53 crc kubenswrapper[4852]: I1201 20:34:53.057685 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-cgf5k"] Dec 01 20:34:54 crc kubenswrapper[4852]: I1201 20:34:54.039847 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9bmvk"] Dec 01 20:34:54 crc kubenswrapper[4852]: I1201 20:34:54.051861 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9bmvk"] Dec 01 20:34:54 crc kubenswrapper[4852]: I1201 20:34:54.334082 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f952c0f-3899-4044-b025-a53f29bb3a59" path="/var/lib/kubelet/pods/8f952c0f-3899-4044-b025-a53f29bb3a59/volumes" Dec 01 20:34:54 crc kubenswrapper[4852]: I1201 20:34:54.335284 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d41256a2-ede4-4222-a4e5-1432e3e18e6f" path="/var/lib/kubelet/pods/d41256a2-ede4-4222-a4e5-1432e3e18e6f/volumes" Dec 01 20:34:56 crc kubenswrapper[4852]: I1201 20:34:56.690301 4852 scope.go:117] "RemoveContainer" containerID="edea85c8accdf25bbf9241b3ea48036e1a4317e34ef3a8289e54384b72cad418" Dec 01 20:34:56 crc kubenswrapper[4852]: I1201 20:34:56.759755 4852 scope.go:117] "RemoveContainer" containerID="566b6bea73bc747292f38185facbf6f05f9445e524a47398ee0825ea80995676" Dec 01 20:34:56 crc kubenswrapper[4852]: I1201 20:34:56.841364 4852 scope.go:117] "RemoveContainer" containerID="199787d5304b740bfaae368ca3715767d6a3122959c93412ab50419ed2f53548" Dec 01 20:34:56 crc kubenswrapper[4852]: I1201 20:34:56.904439 4852 scope.go:117] "RemoveContainer" containerID="5911fa4781f08ccc500f08d7b6dfcc60fb2eccea30a7fcfec5c3e2e4f047e88f" Dec 01 20:34:56 crc kubenswrapper[4852]: I1201 20:34:56.928810 4852 scope.go:117] "RemoveContainer" containerID="860fda961fb837a11c587fc705ab12b1d492c5974fd6c6fa63a4f920fddb65f8" Dec 01 20:34:56 crc kubenswrapper[4852]: I1201 20:34:56.988386 4852 scope.go:117] "RemoveContainer" containerID="7984de1dc8e3b2c722c5914f54901c9685cc19c67b860a61e974b590df0e2a58" Dec 01 20:34:57 crc kubenswrapper[4852]: I1201 20:34:57.046493 4852 scope.go:117] "RemoveContainer" containerID="f32329b1a51582daf5653aecdc1410c54fe2266f973ae1eaabc6270a34ac7a26" Dec 01 20:34:57 crc kubenswrapper[4852]: I1201 20:34:57.067525 4852 scope.go:117] "RemoveContainer" containerID="31440777c6aa2fbc0670d01c36e122863c9678b06b498746b4e38b8a26fef230" Dec 01 20:34:57 crc kubenswrapper[4852]: I1201 20:34:57.086554 4852 scope.go:117] "RemoveContainer" containerID="f7922d0010a7699f282c7f2614bda314b354850b4b6898daa6fdaec5df96fa8e" Dec 01 20:34:58 crc kubenswrapper[4852]: I1201 20:34:58.321342 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:34:58 crc kubenswrapper[4852]: E1201 20:34:58.321901 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:35:13 crc kubenswrapper[4852]: I1201 20:35:13.321434 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:35:13 crc kubenswrapper[4852]: E1201 20:35:13.322346 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:35:16 crc kubenswrapper[4852]: I1201 20:35:16.027167 4852 generic.go:334] "Generic (PLEG): container finished" podID="4055e3d3-767a-4a20-95e4-dda1685cbe61" containerID="8bbda3365687e64de096dd5133dd2f2e2e84940a3fafb9302be2af65785b52db" exitCode=0 Dec 01 20:35:16 crc kubenswrapper[4852]: I1201 20:35:16.027242 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" event={"ID":"4055e3d3-767a-4a20-95e4-dda1685cbe61","Type":"ContainerDied","Data":"8bbda3365687e64de096dd5133dd2f2e2e84940a3fafb9302be2af65785b52db"} Dec 01 20:35:17 crc kubenswrapper[4852]: I1201 20:35:17.569669 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:35:17 crc kubenswrapper[4852]: I1201 20:35:17.769851 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-inventory\") pod \"4055e3d3-767a-4a20-95e4-dda1685cbe61\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " Dec 01 20:35:17 crc kubenswrapper[4852]: I1201 20:35:17.770043 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5vkg\" (UniqueName: \"kubernetes.io/projected/4055e3d3-767a-4a20-95e4-dda1685cbe61-kube-api-access-n5vkg\") pod \"4055e3d3-767a-4a20-95e4-dda1685cbe61\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " Dec 01 20:35:17 crc kubenswrapper[4852]: I1201 20:35:17.770370 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-ssh-key\") pod \"4055e3d3-767a-4a20-95e4-dda1685cbe61\" (UID: \"4055e3d3-767a-4a20-95e4-dda1685cbe61\") " Dec 01 20:35:17 crc kubenswrapper[4852]: I1201 20:35:17.781652 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4055e3d3-767a-4a20-95e4-dda1685cbe61-kube-api-access-n5vkg" (OuterVolumeSpecName: "kube-api-access-n5vkg") pod "4055e3d3-767a-4a20-95e4-dda1685cbe61" (UID: "4055e3d3-767a-4a20-95e4-dda1685cbe61"). InnerVolumeSpecName "kube-api-access-n5vkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:35:17 crc kubenswrapper[4852]: I1201 20:35:17.804942 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-inventory" (OuterVolumeSpecName: "inventory") pod "4055e3d3-767a-4a20-95e4-dda1685cbe61" (UID: "4055e3d3-767a-4a20-95e4-dda1685cbe61"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:35:17 crc kubenswrapper[4852]: I1201 20:35:17.820664 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4055e3d3-767a-4a20-95e4-dda1685cbe61" (UID: "4055e3d3-767a-4a20-95e4-dda1685cbe61"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:35:17 crc kubenswrapper[4852]: I1201 20:35:17.873335 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5vkg\" (UniqueName: \"kubernetes.io/projected/4055e3d3-767a-4a20-95e4-dda1685cbe61-kube-api-access-n5vkg\") on node \"crc\" DevicePath \"\"" Dec 01 20:35:17 crc kubenswrapper[4852]: I1201 20:35:17.873362 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:35:17 crc kubenswrapper[4852]: I1201 20:35:17.873371 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4055e3d3-767a-4a20-95e4-dda1685cbe61-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.059625 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" event={"ID":"4055e3d3-767a-4a20-95e4-dda1685cbe61","Type":"ContainerDied","Data":"38489e187593e1ffa2e165b167a99e38680f00a2d8f24e16d2a8b0ab671d2fcd"} Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.059687 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38489e187593e1ffa2e165b167a99e38680f00a2d8f24e16d2a8b0ab671d2fcd" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.059706 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pn2q6" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.169767 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr"] Dec 01 20:35:18 crc kubenswrapper[4852]: E1201 20:35:18.170220 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4055e3d3-767a-4a20-95e4-dda1685cbe61" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.170242 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="4055e3d3-767a-4a20-95e4-dda1685cbe61" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.170564 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="4055e3d3-767a-4a20-95e4-dda1685cbe61" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.171317 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.177064 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.178224 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.179118 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.179317 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.196329 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr"] Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.281231 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shmwk\" (UniqueName: \"kubernetes.io/projected/5839b9be-5c81-47e2-b392-bf8652b0403e-kube-api-access-shmwk\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.281302 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.281368 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.388775 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shmwk\" (UniqueName: \"kubernetes.io/projected/5839b9be-5c81-47e2-b392-bf8652b0403e-kube-api-access-shmwk\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.389269 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.389687 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr\" 
(UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.396310 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.402761 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.408547 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shmwk\" (UniqueName: \"kubernetes.io/projected/5839b9be-5c81-47e2-b392-bf8652b0403e-kube-api-access-shmwk\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:18 crc kubenswrapper[4852]: I1201 20:35:18.584689 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:35:19 crc kubenswrapper[4852]: I1201 20:35:19.183248 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr"] Dec 01 20:35:20 crc kubenswrapper[4852]: I1201 20:35:20.082735 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" event={"ID":"5839b9be-5c81-47e2-b392-bf8652b0403e","Type":"ContainerStarted","Data":"c511bda013619ba452d0357374fbf52163b8be6fb6c040b6035676f675264338"} Dec 01 20:35:21 crc kubenswrapper[4852]: I1201 20:35:21.096482 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" event={"ID":"5839b9be-5c81-47e2-b392-bf8652b0403e","Type":"ContainerStarted","Data":"eb87f96fbfd0c905264d5dd27c6f97bf32eace80c4ffc5f4cdd610442c33c22d"} Dec 01 20:35:21 crc kubenswrapper[4852]: I1201 20:35:21.123873 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" podStartSLOduration=1.860470882 podStartE2EDuration="3.123854213s" podCreationTimestamp="2025-12-01 20:35:18 +0000 UTC" firstStartedPulling="2025-12-01 20:35:19.18430746 +0000 UTC m=+1839.111388877" lastFinishedPulling="2025-12-01 20:35:20.447690791 +0000 UTC m=+1840.374772208" observedRunningTime="2025-12-01 20:35:21.116065099 +0000 UTC m=+1841.043146526" watchObservedRunningTime="2025-12-01 20:35:21.123854213 +0000 UTC m=+1841.050935630" Dec 01 20:35:25 crc kubenswrapper[4852]: I1201 20:35:25.320977 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:35:26 crc kubenswrapper[4852]: I1201 20:35:26.152561 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" 
event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"4b191747f946a199dbabbaca4f4402f2e146190e04126ba381aab66cc1a053dc"} Dec 01 20:35:38 crc kubenswrapper[4852]: I1201 20:35:38.051588 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-lvjr4"] Dec 01 20:35:38 crc kubenswrapper[4852]: I1201 20:35:38.060251 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-lvjr4"] Dec 01 20:35:38 crc kubenswrapper[4852]: I1201 20:35:38.334506 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33037a1a-8d04-4fe2-bbe2-cba894655514" path="/var/lib/kubelet/pods/33037a1a-8d04-4fe2-bbe2-cba894655514/volumes" Dec 01 20:35:57 crc kubenswrapper[4852]: I1201 20:35:57.249412 4852 scope.go:117] "RemoveContainer" containerID="63bc5381838417996a2f71982308978172f3a57c81907a974f3246153fcaef90" Dec 01 20:36:13 crc kubenswrapper[4852]: I1201 20:36:13.589847 4852 generic.go:334] "Generic (PLEG): container finished" podID="5839b9be-5c81-47e2-b392-bf8652b0403e" containerID="eb87f96fbfd0c905264d5dd27c6f97bf32eace80c4ffc5f4cdd610442c33c22d" exitCode=0 Dec 01 20:36:13 crc kubenswrapper[4852]: I1201 20:36:13.589954 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" event={"ID":"5839b9be-5c81-47e2-b392-bf8652b0403e","Type":"ContainerDied","Data":"eb87f96fbfd0c905264d5dd27c6f97bf32eace80c4ffc5f4cdd610442c33c22d"} Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.009576 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.211150 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-inventory\") pod \"5839b9be-5c81-47e2-b392-bf8652b0403e\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.211231 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shmwk\" (UniqueName: \"kubernetes.io/projected/5839b9be-5c81-47e2-b392-bf8652b0403e-kube-api-access-shmwk\") pod \"5839b9be-5c81-47e2-b392-bf8652b0403e\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.211377 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-ssh-key\") pod \"5839b9be-5c81-47e2-b392-bf8652b0403e\" (UID: \"5839b9be-5c81-47e2-b392-bf8652b0403e\") " Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.220635 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5839b9be-5c81-47e2-b392-bf8652b0403e-kube-api-access-shmwk" (OuterVolumeSpecName: "kube-api-access-shmwk") pod "5839b9be-5c81-47e2-b392-bf8652b0403e" (UID: "5839b9be-5c81-47e2-b392-bf8652b0403e"). InnerVolumeSpecName "kube-api-access-shmwk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.249908 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-inventory" (OuterVolumeSpecName: "inventory") pod "5839b9be-5c81-47e2-b392-bf8652b0403e" (UID: "5839b9be-5c81-47e2-b392-bf8652b0403e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.252016 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5839b9be-5c81-47e2-b392-bf8652b0403e" (UID: "5839b9be-5c81-47e2-b392-bf8652b0403e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.314810 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.314854 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shmwk\" (UniqueName: \"kubernetes.io/projected/5839b9be-5c81-47e2-b392-bf8652b0403e-kube-api-access-shmwk\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.314867 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5839b9be-5c81-47e2-b392-bf8652b0403e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.612325 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" event={"ID":"5839b9be-5c81-47e2-b392-bf8652b0403e","Type":"ContainerDied","Data":"c511bda013619ba452d0357374fbf52163b8be6fb6c040b6035676f675264338"} Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.612788 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c511bda013619ba452d0357374fbf52163b8be6fb6c040b6035676f675264338" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.612440 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.720911 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-w5hlg"] Dec 01 20:36:15 crc kubenswrapper[4852]: E1201 20:36:15.721290 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5839b9be-5c81-47e2-b392-bf8652b0403e" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.721312 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="5839b9be-5c81-47e2-b392-bf8652b0403e" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.721536 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="5839b9be-5c81-47e2-b392-bf8652b0403e" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.722197 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.724784 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.725038 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.725234 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.725523 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.751548 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-w5hlg"] Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.924500 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-w5hlg\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.924553 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-w5hlg\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:15 crc kubenswrapper[4852]: I1201 20:36:15.925271 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdphm\" (UniqueName: \"kubernetes.io/projected/78d28364-d4ba-45c7-be9a-d3a138e64800-kube-api-access-xdphm\") pod \"ssh-known-hosts-edpm-deployment-w5hlg\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:16 crc kubenswrapper[4852]: I1201 20:36:16.028102 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdphm\" (UniqueName: \"kubernetes.io/projected/78d28364-d4ba-45c7-be9a-d3a138e64800-kube-api-access-xdphm\") pod \"ssh-known-hosts-edpm-deployment-w5hlg\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:16 crc kubenswrapper[4852]: I1201 20:36:16.028330 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-w5hlg\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:16 crc kubenswrapper[4852]: I1201 20:36:16.028374 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-w5hlg\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:16 crc 
kubenswrapper[4852]: I1201 20:36:16.035247 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-w5hlg\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:16 crc kubenswrapper[4852]: I1201 20:36:16.036359 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-w5hlg\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:16 crc kubenswrapper[4852]: I1201 20:36:16.050392 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdphm\" (UniqueName: \"kubernetes.io/projected/78d28364-d4ba-45c7-be9a-d3a138e64800-kube-api-access-xdphm\") pod \"ssh-known-hosts-edpm-deployment-w5hlg\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:16 crc kubenswrapper[4852]: I1201 20:36:16.054237 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:16 crc kubenswrapper[4852]: I1201 20:36:16.607266 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-w5hlg"] Dec 01 20:36:16 crc kubenswrapper[4852]: I1201 20:36:16.620380 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" event={"ID":"78d28364-d4ba-45c7-be9a-d3a138e64800","Type":"ContainerStarted","Data":"0900791bfa8e4808e41c5fb49065f4ce3df8a846a010395352034d2000b65dc1"} Dec 01 20:36:19 crc kubenswrapper[4852]: I1201 20:36:19.647993 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" event={"ID":"78d28364-d4ba-45c7-be9a-d3a138e64800","Type":"ContainerStarted","Data":"bae5ddd7d39abb327df17c47cd3f49bb919a02c7bf2cad6ae2f66d5f0a4384dc"} Dec 01 20:36:25 crc kubenswrapper[4852]: I1201 20:36:25.707312 4852 generic.go:334] "Generic (PLEG): container finished" podID="78d28364-d4ba-45c7-be9a-d3a138e64800" containerID="bae5ddd7d39abb327df17c47cd3f49bb919a02c7bf2cad6ae2f66d5f0a4384dc" exitCode=0 Dec 01 20:36:25 crc kubenswrapper[4852]: I1201 20:36:25.707495 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" event={"ID":"78d28364-d4ba-45c7-be9a-d3a138e64800","Type":"ContainerDied","Data":"bae5ddd7d39abb327df17c47cd3f49bb919a02c7bf2cad6ae2f66d5f0a4384dc"} Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.180178 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.367761 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-ssh-key-openstack-edpm-ipam\") pod \"78d28364-d4ba-45c7-be9a-d3a138e64800\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.367901 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdphm\" (UniqueName: \"kubernetes.io/projected/78d28364-d4ba-45c7-be9a-d3a138e64800-kube-api-access-xdphm\") pod \"78d28364-d4ba-45c7-be9a-d3a138e64800\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.367982 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-inventory-0\") pod \"78d28364-d4ba-45c7-be9a-d3a138e64800\" (UID: \"78d28364-d4ba-45c7-be9a-d3a138e64800\") " Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.380440 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78d28364-d4ba-45c7-be9a-d3a138e64800-kube-api-access-xdphm" (OuterVolumeSpecName: "kube-api-access-xdphm") pod "78d28364-d4ba-45c7-be9a-d3a138e64800" (UID: "78d28364-d4ba-45c7-be9a-d3a138e64800"). InnerVolumeSpecName "kube-api-access-xdphm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.400349 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "78d28364-d4ba-45c7-be9a-d3a138e64800" (UID: "78d28364-d4ba-45c7-be9a-d3a138e64800"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.400789 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "78d28364-d4ba-45c7-be9a-d3a138e64800" (UID: "78d28364-d4ba-45c7-be9a-d3a138e64800"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.470423 4852 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.470475 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/78d28364-d4ba-45c7-be9a-d3a138e64800-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.470487 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdphm\" (UniqueName: \"kubernetes.io/projected/78d28364-d4ba-45c7-be9a-d3a138e64800-kube-api-access-xdphm\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.730206 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" event={"ID":"78d28364-d4ba-45c7-be9a-d3a138e64800","Type":"ContainerDied","Data":"0900791bfa8e4808e41c5fb49065f4ce3df8a846a010395352034d2000b65dc1"} Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.730250 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0900791bfa8e4808e41c5fb49065f4ce3df8a846a010395352034d2000b65dc1" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.730320 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w5hlg" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.842285 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m"] Dec 01 20:36:27 crc kubenswrapper[4852]: E1201 20:36:27.842844 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78d28364-d4ba-45c7-be9a-d3a138e64800" containerName="ssh-known-hosts-edpm-deployment" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.842868 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="78d28364-d4ba-45c7-be9a-d3a138e64800" containerName="ssh-known-hosts-edpm-deployment" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.843140 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="78d28364-d4ba-45c7-be9a-d3a138e64800" containerName="ssh-known-hosts-edpm-deployment" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.844092 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.846477 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.846653 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.847430 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.848411 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.855962 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m"] Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.978522 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwc7m\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.978615 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7hs9\" (UniqueName: \"kubernetes.io/projected/68042211-6c10-446e-bf41-ebfeff2a87ef-kube-api-access-g7hs9\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwc7m\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:27 crc kubenswrapper[4852]: I1201 20:36:27.978730 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwc7m\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:28 crc kubenswrapper[4852]: I1201 20:36:28.080852 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7hs9\" (UniqueName: \"kubernetes.io/projected/68042211-6c10-446e-bf41-ebfeff2a87ef-kube-api-access-g7hs9\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwc7m\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:28 crc kubenswrapper[4852]: I1201 20:36:28.081074 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwc7m\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:28 crc kubenswrapper[4852]: I1201 20:36:28.081213 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwc7m\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:28 crc kubenswrapper[4852]: I1201 20:36:28.088497 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwc7m\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:28 crc kubenswrapper[4852]: I1201 20:36:28.089251 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwc7m\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:28 crc kubenswrapper[4852]: I1201 20:36:28.108542 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7hs9\" (UniqueName: \"kubernetes.io/projected/68042211-6c10-446e-bf41-ebfeff2a87ef-kube-api-access-g7hs9\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fwc7m\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:28 crc kubenswrapper[4852]: I1201 20:36:28.178832 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:28 crc kubenswrapper[4852]: I1201 20:36:28.808271 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m"] Dec 01 20:36:29 crc kubenswrapper[4852]: I1201 20:36:29.754237 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" event={"ID":"68042211-6c10-446e-bf41-ebfeff2a87ef","Type":"ContainerStarted","Data":"9264f9f2e35ba53446de2fe06acf4cdd615ffe4bc4967f816f066008b9df28f9"} Dec 01 20:36:30 crc kubenswrapper[4852]: I1201 20:36:30.766084 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" event={"ID":"68042211-6c10-446e-bf41-ebfeff2a87ef","Type":"ContainerStarted","Data":"9316c598f7059ad76283c2fb71b2d0f7fed92c333f3637fd97e0e68583c86388"} Dec 01 20:36:30 crc kubenswrapper[4852]: I1201 20:36:30.789285 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" podStartSLOduration=2.508917471 podStartE2EDuration="3.789263334s" podCreationTimestamp="2025-12-01 20:36:27 +0000 UTC" firstStartedPulling="2025-12-01 20:36:28.810681256 +0000 UTC m=+1908.737762673" lastFinishedPulling="2025-12-01 20:36:30.091027089 +0000 UTC m=+1910.018108536" observedRunningTime="2025-12-01 20:36:30.788634064 +0000 UTC m=+1910.715715511" watchObservedRunningTime="2025-12-01 20:36:30.789263334 +0000 UTC m=+1910.716344791" Dec 01 20:36:39 crc kubenswrapper[4852]: I1201 20:36:39.851429 4852 generic.go:334] "Generic (PLEG): container finished" podID="68042211-6c10-446e-bf41-ebfeff2a87ef" containerID="9316c598f7059ad76283c2fb71b2d0f7fed92c333f3637fd97e0e68583c86388" exitCode=0 Dec 01 20:36:39 crc kubenswrapper[4852]: I1201 20:36:39.851494 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" 
event={"ID":"68042211-6c10-446e-bf41-ebfeff2a87ef","Type":"ContainerDied","Data":"9316c598f7059ad76283c2fb71b2d0f7fed92c333f3637fd97e0e68583c86388"} Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.350133 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.463612 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-ssh-key\") pod \"68042211-6c10-446e-bf41-ebfeff2a87ef\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.463758 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-inventory\") pod \"68042211-6c10-446e-bf41-ebfeff2a87ef\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.464118 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7hs9\" (UniqueName: \"kubernetes.io/projected/68042211-6c10-446e-bf41-ebfeff2a87ef-kube-api-access-g7hs9\") pod \"68042211-6c10-446e-bf41-ebfeff2a87ef\" (UID: \"68042211-6c10-446e-bf41-ebfeff2a87ef\") " Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.468444 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68042211-6c10-446e-bf41-ebfeff2a87ef-kube-api-access-g7hs9" (OuterVolumeSpecName: "kube-api-access-g7hs9") pod "68042211-6c10-446e-bf41-ebfeff2a87ef" (UID: "68042211-6c10-446e-bf41-ebfeff2a87ef"). InnerVolumeSpecName "kube-api-access-g7hs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.491167 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-inventory" (OuterVolumeSpecName: "inventory") pod "68042211-6c10-446e-bf41-ebfeff2a87ef" (UID: "68042211-6c10-446e-bf41-ebfeff2a87ef"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.493636 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "68042211-6c10-446e-bf41-ebfeff2a87ef" (UID: "68042211-6c10-446e-bf41-ebfeff2a87ef"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.565741 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7hs9\" (UniqueName: \"kubernetes.io/projected/68042211-6c10-446e-bf41-ebfeff2a87ef-kube-api-access-g7hs9\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.565776 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.565784 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68042211-6c10-446e-bf41-ebfeff2a87ef-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.881659 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" event={"ID":"68042211-6c10-446e-bf41-ebfeff2a87ef","Type":"ContainerDied","Data":"9264f9f2e35ba53446de2fe06acf4cdd615ffe4bc4967f816f066008b9df28f9"} Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.882126 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9264f9f2e35ba53446de2fe06acf4cdd615ffe4bc4967f816f066008b9df28f9" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.881736 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fwc7m" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.987428 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq"] Dec 01 20:36:41 crc kubenswrapper[4852]: E1201 20:36:41.988077 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68042211-6c10-446e-bf41-ebfeff2a87ef" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.988179 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="68042211-6c10-446e-bf41-ebfeff2a87ef" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:36:41 crc kubenswrapper[4852]: I1201 20:36:41.988524 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="68042211-6c10-446e-bf41-ebfeff2a87ef" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.006239 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.035384 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.035680 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.036340 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.036515 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.038550 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq"] Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.178737 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.178911 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5jjm\" (UniqueName: \"kubernetes.io/projected/23761811-cb87-42a7-b8a4-1ababc02ac47-kube-api-access-m5jjm\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.178974 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.280597 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.280722 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5jjm\" (UniqueName: \"kubernetes.io/projected/23761811-cb87-42a7-b8a4-1ababc02ac47-kube-api-access-m5jjm\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.280772 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq\" (UID: 
\"23761811-cb87-42a7-b8a4-1ababc02ac47\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.285121 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.288973 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.311411 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5jjm\" (UniqueName: \"kubernetes.io/projected/23761811-cb87-42a7-b8a4-1ababc02ac47-kube-api-access-m5jjm\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.346549 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.869663 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq"] Dec 01 20:36:42 crc kubenswrapper[4852]: W1201 20:36:42.880794 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23761811_cb87_42a7_b8a4_1ababc02ac47.slice/crio-94ee11a5d914c5c37cac0bbfb78dede37e1b628a0333486f8ddd9c29f340d9b6 WatchSource:0}: Error finding container 94ee11a5d914c5c37cac0bbfb78dede37e1b628a0333486f8ddd9c29f340d9b6: Status 404 returned error can't find the container with id 94ee11a5d914c5c37cac0bbfb78dede37e1b628a0333486f8ddd9c29f340d9b6 Dec 01 20:36:42 crc kubenswrapper[4852]: I1201 20:36:42.892796 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" event={"ID":"23761811-cb87-42a7-b8a4-1ababc02ac47","Type":"ContainerStarted","Data":"94ee11a5d914c5c37cac0bbfb78dede37e1b628a0333486f8ddd9c29f340d9b6"} Dec 01 20:36:43 crc kubenswrapper[4852]: I1201 20:36:43.901131 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" event={"ID":"23761811-cb87-42a7-b8a4-1ababc02ac47","Type":"ContainerStarted","Data":"749b44ab018fec4689d1f846a038e6d0841c1094a4f0a5e892d4a084f32e970b"} Dec 01 20:36:43 crc kubenswrapper[4852]: I1201 20:36:43.915829 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" podStartSLOduration=2.278935494 podStartE2EDuration="2.915806187s" podCreationTimestamp="2025-12-01 20:36:41 +0000 UTC" firstStartedPulling="2025-12-01 20:36:42.884718903 +0000 UTC m=+1922.811800340" lastFinishedPulling="2025-12-01 20:36:43.521589606 +0000 UTC m=+1923.448671033" observedRunningTime="2025-12-01 20:36:43.9152734 +0000 UTC m=+1923.842354847" 
watchObservedRunningTime="2025-12-01 20:36:43.915806187 +0000 UTC m=+1923.842887614" Dec 01 20:36:55 crc kubenswrapper[4852]: I1201 20:36:55.007439 4852 generic.go:334] "Generic (PLEG): container finished" podID="23761811-cb87-42a7-b8a4-1ababc02ac47" containerID="749b44ab018fec4689d1f846a038e6d0841c1094a4f0a5e892d4a084f32e970b" exitCode=0 Dec 01 20:36:55 crc kubenswrapper[4852]: I1201 20:36:55.007492 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" event={"ID":"23761811-cb87-42a7-b8a4-1ababc02ac47","Type":"ContainerDied","Data":"749b44ab018fec4689d1f846a038e6d0841c1094a4f0a5e892d4a084f32e970b"} Dec 01 20:36:56 crc kubenswrapper[4852]: I1201 20:36:56.478270 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:56 crc kubenswrapper[4852]: I1201 20:36:56.585137 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-inventory\") pod \"23761811-cb87-42a7-b8a4-1ababc02ac47\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " Dec 01 20:36:56 crc kubenswrapper[4852]: I1201 20:36:56.585750 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5jjm\" (UniqueName: \"kubernetes.io/projected/23761811-cb87-42a7-b8a4-1ababc02ac47-kube-api-access-m5jjm\") pod \"23761811-cb87-42a7-b8a4-1ababc02ac47\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " Dec 01 20:36:56 crc kubenswrapper[4852]: I1201 20:36:56.585975 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-ssh-key\") pod \"23761811-cb87-42a7-b8a4-1ababc02ac47\" (UID: \"23761811-cb87-42a7-b8a4-1ababc02ac47\") " Dec 01 20:36:56 crc kubenswrapper[4852]: I1201 20:36:56.591252 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23761811-cb87-42a7-b8a4-1ababc02ac47-kube-api-access-m5jjm" (OuterVolumeSpecName: "kube-api-access-m5jjm") pod "23761811-cb87-42a7-b8a4-1ababc02ac47" (UID: "23761811-cb87-42a7-b8a4-1ababc02ac47"). InnerVolumeSpecName "kube-api-access-m5jjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:36:56 crc kubenswrapper[4852]: I1201 20:36:56.619044 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-inventory" (OuterVolumeSpecName: "inventory") pod "23761811-cb87-42a7-b8a4-1ababc02ac47" (UID: "23761811-cb87-42a7-b8a4-1ababc02ac47"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:36:56 crc kubenswrapper[4852]: I1201 20:36:56.619139 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "23761811-cb87-42a7-b8a4-1ababc02ac47" (UID: "23761811-cb87-42a7-b8a4-1ababc02ac47"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:36:56 crc kubenswrapper[4852]: I1201 20:36:56.690309 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:56 crc kubenswrapper[4852]: I1201 20:36:56.690466 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5jjm\" (UniqueName: \"kubernetes.io/projected/23761811-cb87-42a7-b8a4-1ababc02ac47-kube-api-access-m5jjm\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:56 crc kubenswrapper[4852]: I1201 20:36:56.690490 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23761811-cb87-42a7-b8a4-1ababc02ac47-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.029567 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" event={"ID":"23761811-cb87-42a7-b8a4-1ababc02ac47","Type":"ContainerDied","Data":"94ee11a5d914c5c37cac0bbfb78dede37e1b628a0333486f8ddd9c29f340d9b6"} Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.029609 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94ee11a5d914c5c37cac0bbfb78dede37e1b628a0333486f8ddd9c29f340d9b6" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.029652 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.145709 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw"] Dec 01 20:36:57 crc kubenswrapper[4852]: E1201 20:36:57.146097 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23761811-cb87-42a7-b8a4-1ababc02ac47" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.146122 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="23761811-cb87-42a7-b8a4-1ababc02ac47" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.146388 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="23761811-cb87-42a7-b8a4-1ababc02ac47" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.147184 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.158160 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw"] Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.159183 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.159777 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.159950 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.160082 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.160227 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.160340 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.160465 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.160570 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.210556 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.210613 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.210675 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lfq5\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-kube-api-access-2lfq5\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.210744 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.210817 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.210861 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.210894 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.210919 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.210947 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.210973 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.211064 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: 
\"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.211112 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.211154 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.211183 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.313028 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.313975 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.314128 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.314200 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.314281 4852 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.314365 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.314505 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.314615 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.314744 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.314836 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.314940 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.315061 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.315229 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lfq5\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-kube-api-access-2lfq5\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.315371 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.318196 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.318770 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.318826 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.319093 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.319093 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.321143 4852 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.321205 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.321348 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.322353 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.324901 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.325310 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.328168 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.330204 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.339411 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lfq5\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-kube-api-access-2lfq5\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-s57rw\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:57 crc kubenswrapper[4852]: I1201 20:36:57.527560 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" Dec 01 20:36:58 crc kubenswrapper[4852]: I1201 20:36:58.063641 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw"] Dec 01 20:36:59 crc kubenswrapper[4852]: I1201 20:36:59.046951 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" event={"ID":"a0df6c51-df24-475e-b857-39aafce2f093","Type":"ContainerStarted","Data":"7f38c0186b33154bafbd6ecc88ecea8725a090607684e1fe723d44c3fa7a5f50"} Dec 01 20:36:59 crc kubenswrapper[4852]: I1201 20:36:59.047293 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" event={"ID":"a0df6c51-df24-475e-b857-39aafce2f093","Type":"ContainerStarted","Data":"c54eda1e6f11fd5449d2551e508db1d2720e7278efba78e6311a8fc8ac4d09c3"} Dec 01 20:36:59 crc kubenswrapper[4852]: I1201 20:36:59.083969 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" podStartSLOduration=1.405364447 podStartE2EDuration="2.08394857s" podCreationTimestamp="2025-12-01 20:36:57 +0000 UTC" firstStartedPulling="2025-12-01 20:36:58.07709629 +0000 UTC m=+1938.004177707" lastFinishedPulling="2025-12-01 20:36:58.755680413 +0000 UTC m=+1938.682761830" observedRunningTime="2025-12-01 20:36:59.077611123 +0000 UTC m=+1939.004692530" watchObservedRunningTime="2025-12-01 20:36:59.08394857 +0000 UTC m=+1939.011029997" Dec 01 20:37:37 crc kubenswrapper[4852]: I1201 20:37:37.427149 4852 generic.go:334] "Generic (PLEG): container finished" podID="a0df6c51-df24-475e-b857-39aafce2f093" containerID="7f38c0186b33154bafbd6ecc88ecea8725a090607684e1fe723d44c3fa7a5f50" exitCode=0 Dec 01 20:37:37 crc kubenswrapper[4852]: I1201 20:37:37.427232 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" event={"ID":"a0df6c51-df24-475e-b857-39aafce2f093","Type":"ContainerDied","Data":"7f38c0186b33154bafbd6ecc88ecea8725a090607684e1fe723d44c3fa7a5f50"} Dec 01 20:37:38 crc kubenswrapper[4852]: I1201 20:37:38.915587 4852 util.go:48] "No ready sandbox for pod can be found. 
Dec 01 20:37:37 crc kubenswrapper[4852]: I1201 20:37:37.427149 4852 generic.go:334] "Generic (PLEG): container finished" podID="a0df6c51-df24-475e-b857-39aafce2f093" containerID="7f38c0186b33154bafbd6ecc88ecea8725a090607684e1fe723d44c3fa7a5f50" exitCode=0
Dec 01 20:37:37 crc kubenswrapper[4852]: I1201 20:37:37.427232 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" event={"ID":"a0df6c51-df24-475e-b857-39aafce2f093","Type":"ContainerDied","Data":"7f38c0186b33154bafbd6ecc88ecea8725a090607684e1fe723d44c3fa7a5f50"}
Dec 01 20:37:38 crc kubenswrapper[4852]: I1201 20:37:38.915587 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.065018 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ssh-key\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.065157 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ovn-combined-ca-bundle\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.065202 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-repo-setup-combined-ca-bundle\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066079 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066156 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066212 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-bootstrap-combined-ca-bundle\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066367 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-neutron-metadata-combined-ca-bundle\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066425 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-nova-combined-ca-bundle\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066541 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lfq5\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-kube-api-access-2lfq5\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066592 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-ovn-default-certs-0\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066627 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-libvirt-combined-ca-bundle\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066700 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-telemetry-combined-ca-bundle\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066736 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-inventory\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.066789 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"a0df6c51-df24-475e-b857-39aafce2f093\" (UID: \"a0df6c51-df24-475e-b857-39aafce2f093\") "
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.072264 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.072348 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.072788 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.072977 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.073771 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.075565 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.075728 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.075912 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-kube-api-access-2lfq5" (OuterVolumeSpecName: "kube-api-access-2lfq5") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "kube-api-access-2lfq5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.076692 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.076790 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.078731 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.079009 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.104010 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.104405 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-inventory" (OuterVolumeSpecName: "inventory") pod "a0df6c51-df24-475e-b857-39aafce2f093" (UID: "a0df6c51-df24-475e-b857-39aafce2f093"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169538 4852 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169597 4852 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169615 4852 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169633 4852 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169646 4852 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169659 4852 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169671 4852 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169682 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lfq5\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-kube-api-access-2lfq5\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169693 4852 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169704 4852 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169716 4852 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169729 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-inventory\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169743 4852 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a0df6c51-df24-475e-b857-39aafce2f093-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.169758 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0df6c51-df24-475e-b857-39aafce2f093-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.452889 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw" event={"ID":"a0df6c51-df24-475e-b857-39aafce2f093","Type":"ContainerDied","Data":"c54eda1e6f11fd5449d2551e508db1d2720e7278efba78e6311a8fc8ac4d09c3"}
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.452933 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c54eda1e6f11fd5449d2551e508db1d2720e7278efba78e6311a8fc8ac4d09c3"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.452987 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-s57rw"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.566560 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"]
Dec 01 20:37:39 crc kubenswrapper[4852]: E1201 20:37:39.566994 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0df6c51-df24-475e-b857-39aafce2f093" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.567018 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0df6c51-df24-475e-b857-39aafce2f093" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.567266 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0df6c51-df24-475e-b857-39aafce2f093" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.568045 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.570535 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.571187 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.571614 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.572004 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.578282 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.579527 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.579669 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzpf7\" (UniqueName: \"kubernetes.io/projected/9f4a4b48-5ead-42a3-9438-ec9103db3b39-kube-api-access-wzpf7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.579930 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.580159 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.580338 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.593631 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"]
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.683102 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.683623 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.683763 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.683845 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzpf7\" (UniqueName: \"kubernetes.io/projected/9f4a4b48-5ead-42a3-9438-ec9103db3b39-kube-api-access-wzpf7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.684005 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.685531 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.687678 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.688068 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.690576 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.708884 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzpf7\" (UniqueName: \"kubernetes.io/projected/9f4a4b48-5ead-42a3-9438-ec9103db3b39-kube-api-access-wzpf7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sphgd\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:39 crc kubenswrapper[4852]: I1201 20:37:39.892641 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"
Dec 01 20:37:40 crc kubenswrapper[4852]: I1201 20:37:40.233997 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd"]
Dec 01 20:37:40 crc kubenswrapper[4852]: I1201 20:37:40.469976 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd" event={"ID":"9f4a4b48-5ead-42a3-9438-ec9103db3b39","Type":"ContainerStarted","Data":"08dbf1061bb1cdfbd4355848d0fc5d5206d832c1bfaa4343b7ad11ab1a331db9"}
Dec 01 20:37:40 crc kubenswrapper[4852]: I1201 20:37:40.792533 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 01 20:37:41 crc kubenswrapper[4852]: I1201 20:37:41.481850 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd" event={"ID":"9f4a4b48-5ead-42a3-9438-ec9103db3b39","Type":"ContainerStarted","Data":"eeab52bbd93e456d4aadc1f611b769838807a501e6a6d958b2d956e49030fcff"}
Dec 01 20:37:41 crc kubenswrapper[4852]: I1201 20:37:41.500702 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd" podStartSLOduration=1.9540964920000001 podStartE2EDuration="2.500684256s" podCreationTimestamp="2025-12-01 20:37:39 +0000 UTC" firstStartedPulling="2025-12-01 20:37:40.242921816 +0000 UTC m=+1980.170003233" lastFinishedPulling="2025-12-01 20:37:40.78950954 +0000 UTC m=+1980.716590997" observedRunningTime="2025-12-01 20:37:41.495710022 +0000 UTC m=+1981.422791439" watchObservedRunningTime="2025-12-01 20:37:41.500684256 +0000 UTC m=+1981.427765673"
Dec 01 20:37:50 crc kubenswrapper[4852]: I1201 20:37:50.230916 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 20:37:50 crc kubenswrapper[4852]: I1201 20:37:50.231733 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:38:42 crc kubenswrapper[4852]: I1201 20:38:42.063597 4852 generic.go:334] "Generic (PLEG): container finished" podID="9f4a4b48-5ead-42a3-9438-ec9103db3b39" containerID="eeab52bbd93e456d4aadc1f611b769838807a501e6a6d958b2d956e49030fcff" exitCode=0 Dec 01 20:38:42 crc kubenswrapper[4852]: I1201 20:38:42.063589 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd" event={"ID":"9f4a4b48-5ead-42a3-9438-ec9103db3b39","Type":"ContainerDied","Data":"eeab52bbd93e456d4aadc1f611b769838807a501e6a6d958b2d956e49030fcff"} Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.464116 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd" Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.635956 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-inventory\") pod \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.636030 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovncontroller-config-0\") pod \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.636107 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ssh-key\") pod \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.636152 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzpf7\" (UniqueName: \"kubernetes.io/projected/9f4a4b48-5ead-42a3-9438-ec9103db3b39-kube-api-access-wzpf7\") pod \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.636264 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovn-combined-ca-bundle\") pod \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\" (UID: \"9f4a4b48-5ead-42a3-9438-ec9103db3b39\") " Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.647892 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "9f4a4b48-5ead-42a3-9438-ec9103db3b39" (UID: "9f4a4b48-5ead-42a3-9438-ec9103db3b39"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.648329 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f4a4b48-5ead-42a3-9438-ec9103db3b39-kube-api-access-wzpf7" (OuterVolumeSpecName: "kube-api-access-wzpf7") pod "9f4a4b48-5ead-42a3-9438-ec9103db3b39" (UID: "9f4a4b48-5ead-42a3-9438-ec9103db3b39"). InnerVolumeSpecName "kube-api-access-wzpf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.670169 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "9f4a4b48-5ead-42a3-9438-ec9103db3b39" (UID: "9f4a4b48-5ead-42a3-9438-ec9103db3b39"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.674444 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9f4a4b48-5ead-42a3-9438-ec9103db3b39" (UID: "9f4a4b48-5ead-42a3-9438-ec9103db3b39"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.676520 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-inventory" (OuterVolumeSpecName: "inventory") pod "9f4a4b48-5ead-42a3-9438-ec9103db3b39" (UID: "9f4a4b48-5ead-42a3-9438-ec9103db3b39"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.738152 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.738193 4852 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.738206 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.738216 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzpf7\" (UniqueName: \"kubernetes.io/projected/9f4a4b48-5ead-42a3-9438-ec9103db3b39-kube-api-access-wzpf7\") on node \"crc\" DevicePath \"\"" Dec 01 20:38:43 crc kubenswrapper[4852]: I1201 20:38:43.738229 4852 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f4a4b48-5ead-42a3-9438-ec9103db3b39-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.082183 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd" event={"ID":"9f4a4b48-5ead-42a3-9438-ec9103db3b39","Type":"ContainerDied","Data":"08dbf1061bb1cdfbd4355848d0fc5d5206d832c1bfaa4343b7ad11ab1a331db9"} Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 
20:38:44.082494 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08dbf1061bb1cdfbd4355848d0fc5d5206d832c1bfaa4343b7ad11ab1a331db9" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.082250 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sphgd" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.185078 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr"] Dec 01 20:38:44 crc kubenswrapper[4852]: E1201 20:38:44.185533 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f4a4b48-5ead-42a3-9438-ec9103db3b39" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.185557 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f4a4b48-5ead-42a3-9438-ec9103db3b39" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.185818 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f4a4b48-5ead-42a3-9438-ec9103db3b39" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.186733 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.191217 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.191546 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.191871 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.192357 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.192624 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.195228 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.204259 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr"] Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.348710 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.348786 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-ovn-metadata-agent-neutron-config-0\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.348839 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.348953 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.348995 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.349063 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mzvn\" (UniqueName: \"kubernetes.io/projected/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-kube-api-access-6mzvn\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.450444 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mzvn\" (UniqueName: \"kubernetes.io/projected/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-kube-api-access-6mzvn\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.450764 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.450886 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc 
kubenswrapper[4852]: I1201 20:38:44.451016 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.451189 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.451314 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.454855 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.455020 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.455113 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.455356 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.463983 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" 
(UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.466194 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mzvn\" (UniqueName: \"kubernetes.io/projected/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-kube-api-access-6mzvn\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:44 crc kubenswrapper[4852]: I1201 20:38:44.505199 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:38:45 crc kubenswrapper[4852]: I1201 20:38:45.070655 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr"] Dec 01 20:38:45 crc kubenswrapper[4852]: I1201 20:38:45.071375 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:38:45 crc kubenswrapper[4852]: I1201 20:38:45.091669 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" event={"ID":"4f398cd5-1aca-4178-9fcd-50a3bb15bfec","Type":"ContainerStarted","Data":"021ca82d18f7841b0c302c6baebc7e3661e489d9465fa703e794a7224623dc2f"} Dec 01 20:38:48 crc kubenswrapper[4852]: I1201 20:38:48.119073 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" event={"ID":"4f398cd5-1aca-4178-9fcd-50a3bb15bfec","Type":"ContainerStarted","Data":"55b221e4ee767dbc1d09d67400b949c349b4fe2fb2348fafe250a0dd36e25f3d"} Dec 01 20:38:48 crc kubenswrapper[4852]: I1201 20:38:48.148878 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" podStartSLOduration=1.937754419 podStartE2EDuration="4.148858494s" podCreationTimestamp="2025-12-01 20:38:44 +0000 UTC" firstStartedPulling="2025-12-01 20:38:45.071146293 +0000 UTC m=+2044.998227710" lastFinishedPulling="2025-12-01 20:38:47.282250368 +0000 UTC m=+2047.209331785" observedRunningTime="2025-12-01 20:38:48.146549972 +0000 UTC m=+2048.073631409" watchObservedRunningTime="2025-12-01 20:38:48.148858494 +0000 UTC m=+2048.075939901" Dec 01 20:38:50 crc kubenswrapper[4852]: I1201 20:38:50.229590 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:38:50 crc kubenswrapper[4852]: I1201 20:38:50.230569 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:38:50 crc kubenswrapper[4852]: I1201 20:38:50.230639 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:38:50 crc kubenswrapper[4852]: I1201 20:38:50.231725 4852 kuberuntime_manager.go:1027] 
"Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4b191747f946a199dbabbaca4f4402f2e146190e04126ba381aab66cc1a053dc"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:38:50 crc kubenswrapper[4852]: I1201 20:38:50.231826 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://4b191747f946a199dbabbaca4f4402f2e146190e04126ba381aab66cc1a053dc" gracePeriod=600 Dec 01 20:38:51 crc kubenswrapper[4852]: I1201 20:38:51.148260 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="4b191747f946a199dbabbaca4f4402f2e146190e04126ba381aab66cc1a053dc" exitCode=0 Dec 01 20:38:51 crc kubenswrapper[4852]: I1201 20:38:51.148336 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"4b191747f946a199dbabbaca4f4402f2e146190e04126ba381aab66cc1a053dc"} Dec 01 20:38:51 crc kubenswrapper[4852]: I1201 20:38:51.148660 4852 scope.go:117] "RemoveContainer" containerID="0f7955bc1dea4a00f3bcc36b55bc0632cde049a129d25b8f1d23c48fdc569a89" Dec 01 20:38:52 crc kubenswrapper[4852]: I1201 20:38:52.159330 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47"} Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.227618 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9wdp8"] Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.233341 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.250225 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9wdp8"] Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.341296 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-catalog-content\") pod \"redhat-operators-9wdp8\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.341353 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x9rg\" (UniqueName: \"kubernetes.io/projected/8a9fcfeb-f762-45d9-bdcf-c13237a77064-kube-api-access-4x9rg\") pod \"redhat-operators-9wdp8\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.341410 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-utilities\") pod \"redhat-operators-9wdp8\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.443507 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-utilities\") pod \"redhat-operators-9wdp8\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.443687 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-catalog-content\") pod \"redhat-operators-9wdp8\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.443729 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x9rg\" (UniqueName: \"kubernetes.io/projected/8a9fcfeb-f762-45d9-bdcf-c13237a77064-kube-api-access-4x9rg\") pod \"redhat-operators-9wdp8\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.444447 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-utilities\") pod \"redhat-operators-9wdp8\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.445605 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-catalog-content\") pod \"redhat-operators-9wdp8\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.467601 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4x9rg\" (UniqueName: \"kubernetes.io/projected/8a9fcfeb-f762-45d9-bdcf-c13237a77064-kube-api-access-4x9rg\") pod \"redhat-operators-9wdp8\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:58 crc kubenswrapper[4852]: I1201 20:38:58.559583 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:38:59 crc kubenswrapper[4852]: I1201 20:38:59.072666 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9wdp8"] Dec 01 20:38:59 crc kubenswrapper[4852]: I1201 20:38:59.226795 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9wdp8" event={"ID":"8a9fcfeb-f762-45d9-bdcf-c13237a77064","Type":"ContainerStarted","Data":"3c680afa66619ff5e016f05b15133a5e1ace0c3915e09942ffe692fd660f0fa9"} Dec 01 20:39:00 crc kubenswrapper[4852]: I1201 20:39:00.243983 4852 generic.go:334] "Generic (PLEG): container finished" podID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerID="77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a" exitCode=0 Dec 01 20:39:00 crc kubenswrapper[4852]: I1201 20:39:00.244111 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9wdp8" event={"ID":"8a9fcfeb-f762-45d9-bdcf-c13237a77064","Type":"ContainerDied","Data":"77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a"} Dec 01 20:39:02 crc kubenswrapper[4852]: I1201 20:39:02.267494 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9wdp8" event={"ID":"8a9fcfeb-f762-45d9-bdcf-c13237a77064","Type":"ContainerStarted","Data":"e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb"} Dec 01 20:39:03 crc kubenswrapper[4852]: I1201 20:39:03.278667 4852 generic.go:334] "Generic (PLEG): container finished" podID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerID="e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb" exitCode=0 Dec 01 20:39:03 crc kubenswrapper[4852]: I1201 20:39:03.278719 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9wdp8" event={"ID":"8a9fcfeb-f762-45d9-bdcf-c13237a77064","Type":"ContainerDied","Data":"e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb"} Dec 01 20:39:06 crc kubenswrapper[4852]: I1201 20:39:06.311882 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9wdp8" event={"ID":"8a9fcfeb-f762-45d9-bdcf-c13237a77064","Type":"ContainerStarted","Data":"511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48"} Dec 01 20:39:06 crc kubenswrapper[4852]: I1201 20:39:06.343807 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9wdp8" podStartSLOduration=2.551417831 podStartE2EDuration="8.343786503s" podCreationTimestamp="2025-12-01 20:38:58 +0000 UTC" firstStartedPulling="2025-12-01 20:39:00.246950004 +0000 UTC m=+2060.174031461" lastFinishedPulling="2025-12-01 20:39:06.039318716 +0000 UTC m=+2065.966400133" observedRunningTime="2025-12-01 20:39:06.3401277 +0000 UTC m=+2066.267209117" watchObservedRunningTime="2025-12-01 20:39:06.343786503 +0000 UTC m=+2066.270867920" Dec 01 20:39:08 crc kubenswrapper[4852]: I1201 20:39:08.560583 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9wdp8" Dec 
01 20:39:08 crc kubenswrapper[4852]: I1201 20:39:08.562148 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:39:09 crc kubenswrapper[4852]: I1201 20:39:09.611676 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9wdp8" podUID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerName="registry-server" probeResult="failure" output=< Dec 01 20:39:09 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s Dec 01 20:39:09 crc kubenswrapper[4852]: > Dec 01 20:39:18 crc kubenswrapper[4852]: I1201 20:39:18.608607 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:39:18 crc kubenswrapper[4852]: I1201 20:39:18.653721 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:39:18 crc kubenswrapper[4852]: I1201 20:39:18.847273 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9wdp8"] Dec 01 20:39:20 crc kubenswrapper[4852]: I1201 20:39:20.428602 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9wdp8" podUID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerName="registry-server" containerID="cri-o://511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48" gracePeriod=2 Dec 01 20:39:20 crc kubenswrapper[4852]: I1201 20:39:20.850700 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:39:20 crc kubenswrapper[4852]: I1201 20:39:20.991509 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-utilities\") pod \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " Dec 01 20:39:20 crc kubenswrapper[4852]: I1201 20:39:20.991585 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-catalog-content\") pod \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " Dec 01 20:39:20 crc kubenswrapper[4852]: I1201 20:39:20.991712 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4x9rg\" (UniqueName: \"kubernetes.io/projected/8a9fcfeb-f762-45d9-bdcf-c13237a77064-kube-api-access-4x9rg\") pod \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\" (UID: \"8a9fcfeb-f762-45d9-bdcf-c13237a77064\") " Dec 01 20:39:20 crc kubenswrapper[4852]: I1201 20:39:20.992675 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-utilities" (OuterVolumeSpecName: "utilities") pod "8a9fcfeb-f762-45d9-bdcf-c13237a77064" (UID: "8a9fcfeb-f762-45d9-bdcf-c13237a77064"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:39:20 crc kubenswrapper[4852]: I1201 20:39:20.998018 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a9fcfeb-f762-45d9-bdcf-c13237a77064-kube-api-access-4x9rg" (OuterVolumeSpecName: "kube-api-access-4x9rg") pod "8a9fcfeb-f762-45d9-bdcf-c13237a77064" (UID: "8a9fcfeb-f762-45d9-bdcf-c13237a77064"). InnerVolumeSpecName "kube-api-access-4x9rg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.094474 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4x9rg\" (UniqueName: \"kubernetes.io/projected/8a9fcfeb-f762-45d9-bdcf-c13237a77064-kube-api-access-4x9rg\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.094526 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.106644 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8a9fcfeb-f762-45d9-bdcf-c13237a77064" (UID: "8a9fcfeb-f762-45d9-bdcf-c13237a77064"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.197824 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a9fcfeb-f762-45d9-bdcf-c13237a77064-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.438710 4852 generic.go:334] "Generic (PLEG): container finished" podID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerID="511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48" exitCode=0 Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.438750 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9wdp8" event={"ID":"8a9fcfeb-f762-45d9-bdcf-c13237a77064","Type":"ContainerDied","Data":"511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48"} Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.438776 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9wdp8" event={"ID":"8a9fcfeb-f762-45d9-bdcf-c13237a77064","Type":"ContainerDied","Data":"3c680afa66619ff5e016f05b15133a5e1ace0c3915e09942ffe692fd660f0fa9"} Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.438793 4852 scope.go:117] "RemoveContainer" containerID="511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.438907 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9wdp8" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.491076 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9wdp8"] Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.492144 4852 scope.go:117] "RemoveContainer" containerID="e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.501021 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9wdp8"] Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.516658 4852 scope.go:117] "RemoveContainer" containerID="77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.561826 4852 scope.go:117] "RemoveContainer" containerID="511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48" Dec 01 20:39:21 crc kubenswrapper[4852]: E1201 20:39:21.562071 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48\": container with ID starting with 511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48 not found: ID does not exist" containerID="511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.562101 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48"} err="failed to get container status \"511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48\": rpc error: code = NotFound desc = could not find container \"511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48\": container with ID starting with 511df48c1bbd555854ed6e6a2e92d7749710be5e5d916d699770742cfd49ac48 not found: ID does not exist" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.562123 4852 scope.go:117] "RemoveContainer" containerID="e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb" Dec 01 20:39:21 crc kubenswrapper[4852]: E1201 20:39:21.562547 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb\": container with ID starting with e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb not found: ID does not exist" containerID="e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.562570 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb"} err="failed to get container status \"e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb\": rpc error: code = NotFound desc = could not find container \"e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb\": container with ID starting with e13e56df1878467c6c3c5d0096252982aef7c0c2acd3cbe27e2e302b0d493ffb not found: ID does not exist" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.562584 4852 scope.go:117] "RemoveContainer" containerID="77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a" Dec 01 20:39:21 crc kubenswrapper[4852]: E1201 20:39:21.562892 4852 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a\": container with ID starting with 77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a not found: ID does not exist" containerID="77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a" Dec 01 20:39:21 crc kubenswrapper[4852]: I1201 20:39:21.562915 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a"} err="failed to get container status \"77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a\": rpc error: code = NotFound desc = could not find container \"77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a\": container with ID starting with 77e1d2a8e942269c59e50393205b9dd7eb072f214293e023f30c27b8d24f012a not found: ID does not exist" Dec 01 20:39:22 crc kubenswrapper[4852]: I1201 20:39:22.331153 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" path="/var/lib/kubelet/pods/8a9fcfeb-f762-45d9-bdcf-c13237a77064/volumes" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.717727 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jr78p"] Dec 01 20:39:25 crc kubenswrapper[4852]: E1201 20:39:25.718806 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerName="extract-utilities" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.718843 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerName="extract-utilities" Dec 01 20:39:25 crc kubenswrapper[4852]: E1201 20:39:25.718878 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerName="extract-content" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.718885 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerName="extract-content" Dec 01 20:39:25 crc kubenswrapper[4852]: E1201 20:39:25.718904 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerName="registry-server" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.718912 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerName="registry-server" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.719120 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a9fcfeb-f762-45d9-bdcf-c13237a77064" containerName="registry-server" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.721278 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.729211 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jr78p"] Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.889951 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-catalog-content\") pod \"redhat-marketplace-jr78p\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.890477 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5dhm\" (UniqueName: \"kubernetes.io/projected/5dd017dd-cb1c-4b1a-a420-95fba3742348-kube-api-access-r5dhm\") pod \"redhat-marketplace-jr78p\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.890530 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-utilities\") pod \"redhat-marketplace-jr78p\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.991821 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-catalog-content\") pod \"redhat-marketplace-jr78p\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.991984 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5dhm\" (UniqueName: \"kubernetes.io/projected/5dd017dd-cb1c-4b1a-a420-95fba3742348-kube-api-access-r5dhm\") pod \"redhat-marketplace-jr78p\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.992019 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-utilities\") pod \"redhat-marketplace-jr78p\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.992382 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-catalog-content\") pod \"redhat-marketplace-jr78p\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:25 crc kubenswrapper[4852]: I1201 20:39:25.992481 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-utilities\") pod \"redhat-marketplace-jr78p\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:26 crc kubenswrapper[4852]: I1201 20:39:26.013224 4852 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-r5dhm\" (UniqueName: \"kubernetes.io/projected/5dd017dd-cb1c-4b1a-a420-95fba3742348-kube-api-access-r5dhm\") pod \"redhat-marketplace-jr78p\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:26 crc kubenswrapper[4852]: I1201 20:39:26.048981 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:26 crc kubenswrapper[4852]: I1201 20:39:26.516017 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jr78p"] Dec 01 20:39:27 crc kubenswrapper[4852]: I1201 20:39:27.486745 4852 generic.go:334] "Generic (PLEG): container finished" podID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerID="ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61" exitCode=0 Dec 01 20:39:27 crc kubenswrapper[4852]: I1201 20:39:27.486840 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jr78p" event={"ID":"5dd017dd-cb1c-4b1a-a420-95fba3742348","Type":"ContainerDied","Data":"ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61"} Dec 01 20:39:27 crc kubenswrapper[4852]: I1201 20:39:27.487064 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jr78p" event={"ID":"5dd017dd-cb1c-4b1a-a420-95fba3742348","Type":"ContainerStarted","Data":"a68e6ce224375274f02d572c7c62b9ad1b54b1a6c60aa54dbcb93e68576753cc"} Dec 01 20:39:28 crc kubenswrapper[4852]: I1201 20:39:28.497192 4852 generic.go:334] "Generic (PLEG): container finished" podID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerID="8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2" exitCode=0 Dec 01 20:39:28 crc kubenswrapper[4852]: I1201 20:39:28.497278 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jr78p" event={"ID":"5dd017dd-cb1c-4b1a-a420-95fba3742348","Type":"ContainerDied","Data":"8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2"} Dec 01 20:39:30 crc kubenswrapper[4852]: I1201 20:39:30.514131 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jr78p" event={"ID":"5dd017dd-cb1c-4b1a-a420-95fba3742348","Type":"ContainerStarted","Data":"812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8"} Dec 01 20:39:30 crc kubenswrapper[4852]: I1201 20:39:30.535029 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jr78p" podStartSLOduration=3.129986619 podStartE2EDuration="5.53501273s" podCreationTimestamp="2025-12-01 20:39:25 +0000 UTC" firstStartedPulling="2025-12-01 20:39:27.488399228 +0000 UTC m=+2087.415480645" lastFinishedPulling="2025-12-01 20:39:29.893425339 +0000 UTC m=+2089.820506756" observedRunningTime="2025-12-01 20:39:30.530718917 +0000 UTC m=+2090.457800344" watchObservedRunningTime="2025-12-01 20:39:30.53501273 +0000 UTC m=+2090.462094147" Dec 01 20:39:36 crc kubenswrapper[4852]: I1201 20:39:36.050980 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:36 crc kubenswrapper[4852]: I1201 20:39:36.051591 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:36 crc kubenswrapper[4852]: I1201 20:39:36.097229 4852 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:36 crc kubenswrapper[4852]: I1201 20:39:36.607953 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:36 crc kubenswrapper[4852]: I1201 20:39:36.841145 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jr78p"] Dec 01 20:39:37 crc kubenswrapper[4852]: I1201 20:39:37.573405 4852 generic.go:334] "Generic (PLEG): container finished" podID="4f398cd5-1aca-4178-9fcd-50a3bb15bfec" containerID="55b221e4ee767dbc1d09d67400b949c349b4fe2fb2348fafe250a0dd36e25f3d" exitCode=0 Dec 01 20:39:37 crc kubenswrapper[4852]: I1201 20:39:37.574275 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" event={"ID":"4f398cd5-1aca-4178-9fcd-50a3bb15bfec","Type":"ContainerDied","Data":"55b221e4ee767dbc1d09d67400b949c349b4fe2fb2348fafe250a0dd36e25f3d"} Dec 01 20:39:38 crc kubenswrapper[4852]: I1201 20:39:38.582364 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jr78p" podUID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerName="registry-server" containerID="cri-o://812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8" gracePeriod=2 Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.098344 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.245343 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-inventory\") pod \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.245404 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-metadata-combined-ca-bundle\") pod \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.245496 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mzvn\" (UniqueName: \"kubernetes.io/projected/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-kube-api-access-6mzvn\") pod \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.245592 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-ssh-key\") pod \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.245734 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-nova-metadata-neutron-config-0\") pod \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.245787 4852 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-ovn-metadata-agent-neutron-config-0\") pod \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\" (UID: \"4f398cd5-1aca-4178-9fcd-50a3bb15bfec\") " Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.251403 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "4f398cd5-1aca-4178-9fcd-50a3bb15bfec" (UID: "4f398cd5-1aca-4178-9fcd-50a3bb15bfec"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.252011 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-kube-api-access-6mzvn" (OuterVolumeSpecName: "kube-api-access-6mzvn") pod "4f398cd5-1aca-4178-9fcd-50a3bb15bfec" (UID: "4f398cd5-1aca-4178-9fcd-50a3bb15bfec"). InnerVolumeSpecName "kube-api-access-6mzvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.278798 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4f398cd5-1aca-4178-9fcd-50a3bb15bfec" (UID: "4f398cd5-1aca-4178-9fcd-50a3bb15bfec"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.280116 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "4f398cd5-1aca-4178-9fcd-50a3bb15bfec" (UID: "4f398cd5-1aca-4178-9fcd-50a3bb15bfec"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.284594 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-inventory" (OuterVolumeSpecName: "inventory") pod "4f398cd5-1aca-4178-9fcd-50a3bb15bfec" (UID: "4f398cd5-1aca-4178-9fcd-50a3bb15bfec"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.286728 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "4f398cd5-1aca-4178-9fcd-50a3bb15bfec" (UID: "4f398cd5-1aca-4178-9fcd-50a3bb15bfec"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.347868 4852 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.347897 4852 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.347907 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.347915 4852 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.347925 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mzvn\" (UniqueName: \"kubernetes.io/projected/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-kube-api-access-6mzvn\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.347936 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4f398cd5-1aca-4178-9fcd-50a3bb15bfec-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.482866 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.593053 4852 generic.go:334] "Generic (PLEG): container finished" podID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerID="812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8" exitCode=0 Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.594427 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jr78p" event={"ID":"5dd017dd-cb1c-4b1a-a420-95fba3742348","Type":"ContainerDied","Data":"812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8"} Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.594567 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jr78p" event={"ID":"5dd017dd-cb1c-4b1a-a420-95fba3742348","Type":"ContainerDied","Data":"a68e6ce224375274f02d572c7c62b9ad1b54b1a6c60aa54dbcb93e68576753cc"} Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.594696 4852 scope.go:117] "RemoveContainer" containerID="812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.594916 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jr78p" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.599073 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" event={"ID":"4f398cd5-1aca-4178-9fcd-50a3bb15bfec","Type":"ContainerDied","Data":"021ca82d18f7841b0c302c6baebc7e3661e489d9465fa703e794a7224623dc2f"} Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.599119 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="021ca82d18f7841b0c302c6baebc7e3661e489d9465fa703e794a7224623dc2f" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.599182 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.619625 4852 scope.go:117] "RemoveContainer" containerID="8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.649321 4852 scope.go:117] "RemoveContainer" containerID="ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.653151 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-utilities\") pod \"5dd017dd-cb1c-4b1a-a420-95fba3742348\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.653232 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-catalog-content\") pod \"5dd017dd-cb1c-4b1a-a420-95fba3742348\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.653267 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5dhm\" (UniqueName: \"kubernetes.io/projected/5dd017dd-cb1c-4b1a-a420-95fba3742348-kube-api-access-r5dhm\") pod \"5dd017dd-cb1c-4b1a-a420-95fba3742348\" (UID: \"5dd017dd-cb1c-4b1a-a420-95fba3742348\") " Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.654013 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-utilities" (OuterVolumeSpecName: "utilities") pod "5dd017dd-cb1c-4b1a-a420-95fba3742348" (UID: "5dd017dd-cb1c-4b1a-a420-95fba3742348"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.657502 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dd017dd-cb1c-4b1a-a420-95fba3742348-kube-api-access-r5dhm" (OuterVolumeSpecName: "kube-api-access-r5dhm") pod "5dd017dd-cb1c-4b1a-a420-95fba3742348" (UID: "5dd017dd-cb1c-4b1a-a420-95fba3742348"). InnerVolumeSpecName "kube-api-access-r5dhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.699738 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5dd017dd-cb1c-4b1a-a420-95fba3742348" (UID: "5dd017dd-cb1c-4b1a-a420-95fba3742348"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.718575 4852 scope.go:117] "RemoveContainer" containerID="812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8" Dec 01 20:39:39 crc kubenswrapper[4852]: E1201 20:39:39.719059 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8\": container with ID starting with 812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8 not found: ID does not exist" containerID="812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.719088 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8"} err="failed to get container status \"812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8\": rpc error: code = NotFound desc = could not find container \"812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8\": container with ID starting with 812acf5e6f797c8e648f4f24393ec2e8ae7859c4b89e83eab3d63e6f47f4f2c8 not found: ID does not exist" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.719113 4852 scope.go:117] "RemoveContainer" containerID="8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.719174 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"] Dec 01 20:39:39 crc kubenswrapper[4852]: E1201 20:39:39.719642 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerName="extract-content" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.719658 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerName="extract-content" Dec 01 20:39:39 crc kubenswrapper[4852]: E1201 20:39:39.719673 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerName="registry-server" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.719680 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerName="registry-server" Dec 01 20:39:39 crc kubenswrapper[4852]: E1201 20:39:39.719696 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f398cd5-1aca-4178-9fcd-50a3bb15bfec" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.719705 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f398cd5-1aca-4178-9fcd-50a3bb15bfec" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 20:39:39 crc kubenswrapper[4852]: E1201 20:39:39.719744 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerName="extract-utilities" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.719752 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerName="extract-utilities" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.719988 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f398cd5-1aca-4178-9fcd-50a3bb15bfec" 
containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.720013 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dd017dd-cb1c-4b1a-a420-95fba3742348" containerName="registry-server" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.720786 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv" Dec 01 20:39:39 crc kubenswrapper[4852]: E1201 20:39:39.722297 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2\": container with ID starting with 8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2 not found: ID does not exist" containerID="8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.722340 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2"} err="failed to get container status \"8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2\": rpc error: code = NotFound desc = could not find container \"8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2\": container with ID starting with 8765a2ca5c746023b012a34973b03c49b21dfbabb44ede0a07e0d358c0e7a3f2 not found: ID does not exist" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.722361 4852 scope.go:117] "RemoveContainer" containerID="ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61" Dec 01 20:39:39 crc kubenswrapper[4852]: E1201 20:39:39.722630 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61\": container with ID starting with ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61 not found: ID does not exist" containerID="ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.722653 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61"} err="failed to get container status \"ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61\": rpc error: code = NotFound desc = could not find container \"ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61\": container with ID starting with ca29d73b050a8d3d937b6ff0f468e93a6585e2540ba8ce040177c3c1d4f23e61 not found: ID does not exist" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.724394 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.724648 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.724833 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.724960 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.725087 4852 reflector.go:368] 
Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.737895 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"]
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.756031 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.756060 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dd017dd-cb1c-4b1a-a420-95fba3742348-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.756074 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5dhm\" (UniqueName: \"kubernetes.io/projected/5dd017dd-cb1c-4b1a-a420-95fba3742348-kube-api-access-r5dhm\") on node \"crc\" DevicePath \"\""
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.857337 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.857387 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5cxf\" (UniqueName: \"kubernetes.io/projected/6e87649b-f17b-4067-9803-f1cd06147f82-kube-api-access-r5cxf\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.857419 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.857528 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.857585 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.933014 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jr78p"]
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.943404 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jr78p"]
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.959693 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5cxf\" (UniqueName: \"kubernetes.io/projected/6e87649b-f17b-4067-9803-f1cd06147f82-kube-api-access-r5cxf\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.959811 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.959931 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.960015 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.960091 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.963852 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.964931 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.966275 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.968659 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:39 crc kubenswrapper[4852]: I1201 20:39:39.983770 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5cxf\" (UniqueName: \"kubernetes.io/projected/6e87649b-f17b-4067-9803-f1cd06147f82-kube-api-access-r5cxf\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:40 crc kubenswrapper[4852]: I1201 20:39:40.068437 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"
Dec 01 20:39:40 crc kubenswrapper[4852]: I1201 20:39:40.338372 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5dd017dd-cb1c-4b1a-a420-95fba3742348" path="/var/lib/kubelet/pods/5dd017dd-cb1c-4b1a-a420-95fba3742348/volumes"
Dec 01 20:39:40 crc kubenswrapper[4852]: I1201 20:39:40.612697 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv"]
Dec 01 20:39:41 crc kubenswrapper[4852]: I1201 20:39:41.327974 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 01 20:39:41 crc kubenswrapper[4852]: I1201 20:39:41.623530 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv" event={"ID":"6e87649b-f17b-4067-9803-f1cd06147f82","Type":"ContainerStarted","Data":"90f7b6cd1eae55f33a663da52f1c800e1077c8a28d40b2c0482667055c68ac23"}
Dec 01 20:39:42 crc kubenswrapper[4852]: I1201 20:39:42.635321 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv" event={"ID":"6e87649b-f17b-4067-9803-f1cd06147f82","Type":"ContainerStarted","Data":"d883d86eecaa579ccdb146eb8b412b258c976d2984b1888a6ba9c93afc61fb20"}
Dec 01 20:39:42 crc kubenswrapper[4852]: I1201 20:39:42.660905 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv" podStartSLOduration=2.952251547 podStartE2EDuration="3.660879045s" podCreationTimestamp="2025-12-01 20:39:39 +0000 UTC" firstStartedPulling="2025-12-01 20:39:40.617077697 +0000 UTC m=+2100.544159114" lastFinishedPulling="2025-12-01 20:39:41.325705175 +0000 UTC m=+2101.252786612" observedRunningTime="2025-12-01 20:39:42.649193651 +0000 UTC m=+2102.576275058" watchObservedRunningTime="2025-12-01 20:39:42.660879045 +0000 UTC m=+2102.587960502"
Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.404622 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6h7h2"]
Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.410636 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6h7h2"
Need to start a new one" pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.421293 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6h7h2"] Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.432362 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-utilities\") pod \"certified-operators-6h7h2\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.433048 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzr48\" (UniqueName: \"kubernetes.io/projected/b5babafb-a3c2-4948-b273-0d1c73b8bd84-kube-api-access-xzr48\") pod \"certified-operators-6h7h2\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.433765 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-catalog-content\") pod \"certified-operators-6h7h2\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.535512 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzr48\" (UniqueName: \"kubernetes.io/projected/b5babafb-a3c2-4948-b273-0d1c73b8bd84-kube-api-access-xzr48\") pod \"certified-operators-6h7h2\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.535633 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-catalog-content\") pod \"certified-operators-6h7h2\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.535659 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-utilities\") pod \"certified-operators-6h7h2\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.536432 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-utilities\") pod \"certified-operators-6h7h2\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.536640 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-catalog-content\") pod \"certified-operators-6h7h2\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.554133 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xzr48\" (UniqueName: \"kubernetes.io/projected/b5babafb-a3c2-4948-b273-0d1c73b8bd84-kube-api-access-xzr48\") pod \"certified-operators-6h7h2\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:48 crc kubenswrapper[4852]: I1201 20:40:48.735843 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:49 crc kubenswrapper[4852]: I1201 20:40:49.239520 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6h7h2"] Dec 01 20:40:49 crc kubenswrapper[4852]: I1201 20:40:49.263640 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h7h2" event={"ID":"b5babafb-a3c2-4948-b273-0d1c73b8bd84","Type":"ContainerStarted","Data":"4447b52cba96a56d4ec3d388d4a24ed7ec561ad5d000c13244908d5de466f36f"} Dec 01 20:40:50 crc kubenswrapper[4852]: I1201 20:40:50.280294 4852 generic.go:334] "Generic (PLEG): container finished" podID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerID="bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a" exitCode=0 Dec 01 20:40:50 crc kubenswrapper[4852]: I1201 20:40:50.280407 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h7h2" event={"ID":"b5babafb-a3c2-4948-b273-0d1c73b8bd84","Type":"ContainerDied","Data":"bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a"} Dec 01 20:40:51 crc kubenswrapper[4852]: I1201 20:40:51.297822 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h7h2" event={"ID":"b5babafb-a3c2-4948-b273-0d1c73b8bd84","Type":"ContainerStarted","Data":"891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279"} Dec 01 20:40:52 crc kubenswrapper[4852]: I1201 20:40:52.312974 4852 generic.go:334] "Generic (PLEG): container finished" podID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerID="891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279" exitCode=0 Dec 01 20:40:52 crc kubenswrapper[4852]: I1201 20:40:52.313025 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h7h2" event={"ID":"b5babafb-a3c2-4948-b273-0d1c73b8bd84","Type":"ContainerDied","Data":"891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279"} Dec 01 20:40:53 crc kubenswrapper[4852]: I1201 20:40:53.325674 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h7h2" event={"ID":"b5babafb-a3c2-4948-b273-0d1c73b8bd84","Type":"ContainerStarted","Data":"dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c"} Dec 01 20:40:53 crc kubenswrapper[4852]: I1201 20:40:53.362303 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6h7h2" podStartSLOduration=2.785381992 podStartE2EDuration="5.362277773s" podCreationTimestamp="2025-12-01 20:40:48 +0000 UTC" firstStartedPulling="2025-12-01 20:40:50.28461415 +0000 UTC m=+2170.211695567" lastFinishedPulling="2025-12-01 20:40:52.861509911 +0000 UTC m=+2172.788591348" observedRunningTime="2025-12-01 20:40:53.351853808 +0000 UTC m=+2173.278935275" watchObservedRunningTime="2025-12-01 20:40:53.362277773 +0000 UTC m=+2173.289359210" Dec 01 20:40:58 crc kubenswrapper[4852]: I1201 20:40:58.736902 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:58 crc kubenswrapper[4852]: I1201 20:40:58.737413 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:58 crc kubenswrapper[4852]: I1201 20:40:58.786384 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:59 crc kubenswrapper[4852]: I1201 20:40:59.441432 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:40:59 crc kubenswrapper[4852]: I1201 20:40:59.506121 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6h7h2"] Dec 01 20:41:01 crc kubenswrapper[4852]: I1201 20:41:01.404593 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6h7h2" podUID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerName="registry-server" containerID="cri-o://dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c" gracePeriod=2 Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.362363 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.413413 4852 generic.go:334] "Generic (PLEG): container finished" podID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerID="dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c" exitCode=0 Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.413469 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h7h2" event={"ID":"b5babafb-a3c2-4948-b273-0d1c73b8bd84","Type":"ContainerDied","Data":"dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c"} Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.413501 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h7h2" event={"ID":"b5babafb-a3c2-4948-b273-0d1c73b8bd84","Type":"ContainerDied","Data":"4447b52cba96a56d4ec3d388d4a24ed7ec561ad5d000c13244908d5de466f36f"} Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.413510 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6h7h2" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.413525 4852 scope.go:117] "RemoveContainer" containerID="dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.432706 4852 scope.go:117] "RemoveContainer" containerID="891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.455966 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-utilities\") pod \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.456032 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzr48\" (UniqueName: \"kubernetes.io/projected/b5babafb-a3c2-4948-b273-0d1c73b8bd84-kube-api-access-xzr48\") pod \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.456138 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-catalog-content\") pod \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\" (UID: \"b5babafb-a3c2-4948-b273-0d1c73b8bd84\") " Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.456667 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-utilities" (OuterVolumeSpecName: "utilities") pod "b5babafb-a3c2-4948-b273-0d1c73b8bd84" (UID: "b5babafb-a3c2-4948-b273-0d1c73b8bd84"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.456766 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.459076 4852 scope.go:117] "RemoveContainer" containerID="bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.462099 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5babafb-a3c2-4948-b273-0d1c73b8bd84-kube-api-access-xzr48" (OuterVolumeSpecName: "kube-api-access-xzr48") pod "b5babafb-a3c2-4948-b273-0d1c73b8bd84" (UID: "b5babafb-a3c2-4948-b273-0d1c73b8bd84"). InnerVolumeSpecName "kube-api-access-xzr48". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.506902 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5babafb-a3c2-4948-b273-0d1c73b8bd84" (UID: "b5babafb-a3c2-4948-b273-0d1c73b8bd84"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.533615 4852 scope.go:117] "RemoveContainer" containerID="dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c" Dec 01 20:41:02 crc kubenswrapper[4852]: E1201 20:41:02.534091 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c\": container with ID starting with dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c not found: ID does not exist" containerID="dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.534117 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c"} err="failed to get container status \"dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c\": rpc error: code = NotFound desc = could not find container \"dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c\": container with ID starting with dc5b33bb6affd868025f92ca1314f1663c20a5e52ae4712e35f642c4dce2136c not found: ID does not exist" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.534137 4852 scope.go:117] "RemoveContainer" containerID="891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279" Dec 01 20:41:02 crc kubenswrapper[4852]: E1201 20:41:02.535443 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279\": container with ID starting with 891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279 not found: ID does not exist" containerID="891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.535485 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279"} err="failed to get container status \"891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279\": rpc error: code = NotFound desc = could not find container \"891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279\": container with ID starting with 891cc2a6db5484483f6be4742f0c53a65b79d803e5560199cc52a047b57ce279 not found: ID does not exist" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.535499 4852 scope.go:117] "RemoveContainer" containerID="bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a" Dec 01 20:41:02 crc kubenswrapper[4852]: E1201 20:41:02.536140 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a\": container with ID starting with bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a not found: ID does not exist" containerID="bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.536163 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a"} err="failed to get container status \"bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a\": rpc error: code = NotFound desc = could not 
find container \"bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a\": container with ID starting with bc833bdee48e848649e72ff7fba188fa364548e40989d476dc67400e20653a2a not found: ID does not exist" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.558965 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5babafb-a3c2-4948-b273-0d1c73b8bd84-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.559114 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzr48\" (UniqueName: \"kubernetes.io/projected/b5babafb-a3c2-4948-b273-0d1c73b8bd84-kube-api-access-xzr48\") on node \"crc\" DevicePath \"\"" Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.748500 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6h7h2"] Dec 01 20:41:02 crc kubenswrapper[4852]: I1201 20:41:02.755618 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6h7h2"] Dec 01 20:41:04 crc kubenswrapper[4852]: I1201 20:41:04.334206 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" path="/var/lib/kubelet/pods/b5babafb-a3c2-4948-b273-0d1c73b8bd84/volumes" Dec 01 20:41:20 crc kubenswrapper[4852]: I1201 20:41:20.230294 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:41:20 crc kubenswrapper[4852]: I1201 20:41:20.230847 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:41:50 crc kubenswrapper[4852]: I1201 20:41:50.229875 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:41:50 crc kubenswrapper[4852]: I1201 20:41:50.230422 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:42:20 crc kubenswrapper[4852]: I1201 20:42:20.229782 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:42:20 crc kubenswrapper[4852]: I1201 20:42:20.230307 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:42:20 crc kubenswrapper[4852]: I1201 20:42:20.230351 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:42:20 crc kubenswrapper[4852]: I1201 20:42:20.231128 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:42:20 crc kubenswrapper[4852]: I1201 20:42:20.231176 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" gracePeriod=600 Dec 01 20:42:20 crc kubenswrapper[4852]: E1201 20:42:20.353121 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:42:21 crc kubenswrapper[4852]: I1201 20:42:21.142726 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" exitCode=0 Dec 01 20:42:21 crc kubenswrapper[4852]: I1201 20:42:21.142819 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47"} Dec 01 20:42:21 crc kubenswrapper[4852]: I1201 20:42:21.143030 4852 scope.go:117] "RemoveContainer" containerID="4b191747f946a199dbabbaca4f4402f2e146190e04126ba381aab66cc1a053dc" Dec 01 20:42:21 crc kubenswrapper[4852]: I1201 20:42:21.143775 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:42:21 crc kubenswrapper[4852]: E1201 20:42:21.144129 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:42:32 crc kubenswrapper[4852]: I1201 20:42:32.320245 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:42:32 crc kubenswrapper[4852]: E1201 20:42:32.320867 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:42:47 crc kubenswrapper[4852]: I1201 20:42:47.319871 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:42:47 crc kubenswrapper[4852]: E1201 20:42:47.320761 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.619487 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lfrdv"] Dec 01 20:42:52 crc kubenswrapper[4852]: E1201 20:42:52.620669 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerName="extract-content" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.620688 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerName="extract-content" Dec 01 20:42:52 crc kubenswrapper[4852]: E1201 20:42:52.620726 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerName="extract-utilities" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.620738 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerName="extract-utilities" Dec 01 20:42:52 crc kubenswrapper[4852]: E1201 20:42:52.620750 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerName="registry-server" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.620763 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerName="registry-server" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.620997 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5babafb-a3c2-4948-b273-0d1c73b8bd84" containerName="registry-server" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.623206 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.636601 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lfrdv"] Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.649047 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-catalog-content\") pod \"community-operators-lfrdv\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.649101 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smrcg\" (UniqueName: \"kubernetes.io/projected/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-kube-api-access-smrcg\") pod \"community-operators-lfrdv\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.649515 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-utilities\") pod \"community-operators-lfrdv\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.751483 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-catalog-content\") pod \"community-operators-lfrdv\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.751559 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smrcg\" (UniqueName: \"kubernetes.io/projected/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-kube-api-access-smrcg\") pod \"community-operators-lfrdv\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.751761 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-utilities\") pod \"community-operators-lfrdv\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.751934 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-catalog-content\") pod \"community-operators-lfrdv\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.752253 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-utilities\") pod \"community-operators-lfrdv\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.775049 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-smrcg\" (UniqueName: \"kubernetes.io/projected/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-kube-api-access-smrcg\") pod \"community-operators-lfrdv\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:52 crc kubenswrapper[4852]: I1201 20:42:52.983608 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:42:53 crc kubenswrapper[4852]: I1201 20:42:53.536180 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lfrdv"] Dec 01 20:42:54 crc kubenswrapper[4852]: I1201 20:42:54.505757 4852 generic.go:334] "Generic (PLEG): container finished" podID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerID="b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288" exitCode=0 Dec 01 20:42:54 crc kubenswrapper[4852]: I1201 20:42:54.505875 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lfrdv" event={"ID":"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf","Type":"ContainerDied","Data":"b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288"} Dec 01 20:42:54 crc kubenswrapper[4852]: I1201 20:42:54.506112 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lfrdv" event={"ID":"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf","Type":"ContainerStarted","Data":"f1b95b7a233ef8fe9971d1602c1228097b278d6fc0be5ae9b12b35dfc51280b7"} Dec 01 20:42:58 crc kubenswrapper[4852]: I1201 20:42:58.541298 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lfrdv" event={"ID":"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf","Type":"ContainerStarted","Data":"b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6"} Dec 01 20:42:59 crc kubenswrapper[4852]: I1201 20:42:59.553784 4852 generic.go:334] "Generic (PLEG): container finished" podID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerID="b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6" exitCode=0 Dec 01 20:42:59 crc kubenswrapper[4852]: I1201 20:42:59.554161 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lfrdv" event={"ID":"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf","Type":"ContainerDied","Data":"b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6"} Dec 01 20:43:00 crc kubenswrapper[4852]: I1201 20:43:00.326355 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:43:00 crc kubenswrapper[4852]: E1201 20:43:00.327104 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:43:00 crc kubenswrapper[4852]: I1201 20:43:00.564313 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lfrdv" event={"ID":"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf","Type":"ContainerStarted","Data":"4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e"} Dec 01 20:43:00 crc kubenswrapper[4852]: I1201 20:43:00.584254 4852 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lfrdv" podStartSLOduration=3.118721979 podStartE2EDuration="8.584238272s" podCreationTimestamp="2025-12-01 20:42:52 +0000 UTC" firstStartedPulling="2025-12-01 20:42:54.507412519 +0000 UTC m=+2294.434493936" lastFinishedPulling="2025-12-01 20:42:59.972928812 +0000 UTC m=+2299.900010229" observedRunningTime="2025-12-01 20:43:00.582346074 +0000 UTC m=+2300.509427491" watchObservedRunningTime="2025-12-01 20:43:00.584238272 +0000 UTC m=+2300.511319679" Dec 01 20:43:02 crc kubenswrapper[4852]: I1201 20:43:02.985691 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:43:02 crc kubenswrapper[4852]: I1201 20:43:02.986509 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:43:03 crc kubenswrapper[4852]: I1201 20:43:03.038920 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:43:13 crc kubenswrapper[4852]: I1201 20:43:13.087708 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:43:13 crc kubenswrapper[4852]: I1201 20:43:13.139190 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lfrdv"] Dec 01 20:43:13 crc kubenswrapper[4852]: I1201 20:43:13.682486 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lfrdv" podUID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerName="registry-server" containerID="cri-o://4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e" gracePeriod=2 Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.127647 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.206853 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-utilities\") pod \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.206974 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smrcg\" (UniqueName: \"kubernetes.io/projected/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-kube-api-access-smrcg\") pod \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.207062 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-catalog-content\") pod \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\" (UID: \"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf\") " Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.208316 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-utilities" (OuterVolumeSpecName: "utilities") pod "8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" (UID: "8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.223869 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-kube-api-access-smrcg" (OuterVolumeSpecName: "kube-api-access-smrcg") pod "8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" (UID: "8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf"). InnerVolumeSpecName "kube-api-access-smrcg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.262764 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" (UID: "8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.309746 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smrcg\" (UniqueName: \"kubernetes.io/projected/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-kube-api-access-smrcg\") on node \"crc\" DevicePath \"\"" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.309801 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.309818 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.699483 4852 generic.go:334] "Generic (PLEG): container finished" podID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerID="4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e" exitCode=0 Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.699534 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lfrdv" event={"ID":"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf","Type":"ContainerDied","Data":"4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e"} Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.699567 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lfrdv" event={"ID":"8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf","Type":"ContainerDied","Data":"f1b95b7a233ef8fe9971d1602c1228097b278d6fc0be5ae9b12b35dfc51280b7"} Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.699595 4852 scope.go:117] "RemoveContainer" containerID="4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.699611 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lfrdv" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.726282 4852 scope.go:117] "RemoveContainer" containerID="b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.741257 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lfrdv"] Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.754390 4852 scope.go:117] "RemoveContainer" containerID="b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.762778 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lfrdv"] Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.798139 4852 scope.go:117] "RemoveContainer" containerID="4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e" Dec 01 20:43:14 crc kubenswrapper[4852]: E1201 20:43:14.798501 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e\": container with ID starting with 4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e not found: ID does not exist" containerID="4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.798543 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e"} err="failed to get container status \"4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e\": rpc error: code = NotFound desc = could not find container \"4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e\": container with ID starting with 4a98fc9541f6f372b96123073a52f2a0d597395eef857109ee2e177bbdd5270e not found: ID does not exist" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.798573 4852 scope.go:117] "RemoveContainer" containerID="b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6" Dec 01 20:43:14 crc kubenswrapper[4852]: E1201 20:43:14.798816 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6\": container with ID starting with b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6 not found: ID does not exist" containerID="b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.798842 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6"} err="failed to get container status \"b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6\": rpc error: code = NotFound desc = could not find container \"b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6\": container with ID starting with b5f155243a59abfb0248009cd971e84537e6bd3ac0a7538bcf76b7299f80e6b6 not found: ID does not exist" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.798857 4852 scope.go:117] "RemoveContainer" containerID="b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288" Dec 01 20:43:14 crc kubenswrapper[4852]: E1201 20:43:14.799028 4852 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288\": container with ID starting with b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288 not found: ID does not exist" containerID="b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288" Dec 01 20:43:14 crc kubenswrapper[4852]: I1201 20:43:14.799050 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288"} err="failed to get container status \"b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288\": rpc error: code = NotFound desc = could not find container \"b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288\": container with ID starting with b95b6b3b98666956a5e428cbcb7ce290f5f5ac44f955fb919c67a86f253a7288 not found: ID does not exist" Dec 01 20:43:15 crc kubenswrapper[4852]: I1201 20:43:15.320145 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:43:15 crc kubenswrapper[4852]: E1201 20:43:15.320645 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:43:16 crc kubenswrapper[4852]: I1201 20:43:16.336896 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" path="/var/lib/kubelet/pods/8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf/volumes" Dec 01 20:43:30 crc kubenswrapper[4852]: I1201 20:43:30.326382 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:43:30 crc kubenswrapper[4852]: E1201 20:43:30.327268 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:43:43 crc kubenswrapper[4852]: I1201 20:43:43.320259 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:43:43 crc kubenswrapper[4852]: E1201 20:43:43.321352 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:43:56 crc kubenswrapper[4852]: I1201 20:43:56.319851 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:43:56 crc kubenswrapper[4852]: E1201 20:43:56.320626 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:44:06 crc kubenswrapper[4852]: I1201 20:44:06.240413 4852 generic.go:334] "Generic (PLEG): container finished" podID="6e87649b-f17b-4067-9803-f1cd06147f82" containerID="d883d86eecaa579ccdb146eb8b412b258c976d2984b1888a6ba9c93afc61fb20" exitCode=0 Dec 01 20:44:06 crc kubenswrapper[4852]: I1201 20:44:06.240523 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv" event={"ID":"6e87649b-f17b-4067-9803-f1cd06147f82","Type":"ContainerDied","Data":"d883d86eecaa579ccdb146eb8b412b258c976d2984b1888a6ba9c93afc61fb20"} Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.695419 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv" Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.788785 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-ssh-key\") pod \"6e87649b-f17b-4067-9803-f1cd06147f82\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.788945 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-inventory\") pod \"6e87649b-f17b-4067-9803-f1cd06147f82\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.789060 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5cxf\" (UniqueName: \"kubernetes.io/projected/6e87649b-f17b-4067-9803-f1cd06147f82-kube-api-access-r5cxf\") pod \"6e87649b-f17b-4067-9803-f1cd06147f82\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.789087 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-secret-0\") pod \"6e87649b-f17b-4067-9803-f1cd06147f82\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.789146 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-combined-ca-bundle\") pod \"6e87649b-f17b-4067-9803-f1cd06147f82\" (UID: \"6e87649b-f17b-4067-9803-f1cd06147f82\") " Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.794438 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "6e87649b-f17b-4067-9803-f1cd06147f82" (UID: "6e87649b-f17b-4067-9803-f1cd06147f82"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.795475 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e87649b-f17b-4067-9803-f1cd06147f82-kube-api-access-r5cxf" (OuterVolumeSpecName: "kube-api-access-r5cxf") pod "6e87649b-f17b-4067-9803-f1cd06147f82" (UID: "6e87649b-f17b-4067-9803-f1cd06147f82"). InnerVolumeSpecName "kube-api-access-r5cxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.818539 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "6e87649b-f17b-4067-9803-f1cd06147f82" (UID: "6e87649b-f17b-4067-9803-f1cd06147f82"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.821212 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6e87649b-f17b-4067-9803-f1cd06147f82" (UID: "6e87649b-f17b-4067-9803-f1cd06147f82"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.821524 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-inventory" (OuterVolumeSpecName: "inventory") pod "6e87649b-f17b-4067-9803-f1cd06147f82" (UID: "6e87649b-f17b-4067-9803-f1cd06147f82"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.891669 4852 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.891698 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.891710 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.891721 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5cxf\" (UniqueName: \"kubernetes.io/projected/6e87649b-f17b-4067-9803-f1cd06147f82-kube-api-access-r5cxf\") on node \"crc\" DevicePath \"\"" Dec 01 20:44:07 crc kubenswrapper[4852]: I1201 20:44:07.891735 4852 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6e87649b-f17b-4067-9803-f1cd06147f82-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.260364 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv" event={"ID":"6e87649b-f17b-4067-9803-f1cd06147f82","Type":"ContainerDied","Data":"90f7b6cd1eae55f33a663da52f1c800e1077c8a28d40b2c0482667055c68ac23"} Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.260406 4852 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90f7b6cd1eae55f33a663da52f1c800e1077c8a28d40b2c0482667055c68ac23" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.260466 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.372051 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b"] Dec 01 20:44:08 crc kubenswrapper[4852]: E1201 20:44:08.372899 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e87649b-f17b-4067-9803-f1cd06147f82" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.372924 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e87649b-f17b-4067-9803-f1cd06147f82" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 20:44:08 crc kubenswrapper[4852]: E1201 20:44:08.372935 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerName="extract-utilities" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.372943 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerName="extract-utilities" Dec 01 20:44:08 crc kubenswrapper[4852]: E1201 20:44:08.372957 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerName="extract-content" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.372965 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerName="extract-content" Dec 01 20:44:08 crc kubenswrapper[4852]: E1201 20:44:08.373006 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerName="registry-server" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.373017 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerName="registry-server" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.373258 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e87649b-f17b-4067-9803-f1cd06147f82" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.373285 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b2f0df4-4bdc-43af-8157-0e3e26f2a4bf" containerName="registry-server" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.374039 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.376283 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.376976 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.377340 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.377622 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.377871 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.378154 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.378716 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.390220 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b"] Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.427548 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.427675 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.427742 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j9rc\" (UniqueName: \"kubernetes.io/projected/aaf7fe95-5448-404e-b2f4-7bac25b267db-kube-api-access-8j9rc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.427958 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.427988 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.428027 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.428136 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.428203 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.428223 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.529998 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j9rc\" (UniqueName: \"kubernetes.io/projected/aaf7fe95-5448-404e-b2f4-7bac25b267db-kube-api-access-8j9rc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.530102 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.530127 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.530155 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.530212 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.530248 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.530270 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.530313 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.530355 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.531193 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.534708 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.535081 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: 
\"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.535613 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.535938 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.535952 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.536037 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.536214 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.558204 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j9rc\" (UniqueName: \"kubernetes.io/projected/aaf7fe95-5448-404e-b2f4-7bac25b267db-kube-api-access-8j9rc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9669b\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:08 crc kubenswrapper[4852]: I1201 20:44:08.696322 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:44:09 crc kubenswrapper[4852]: I1201 20:44:09.221159 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b"] Dec 01 20:44:09 crc kubenswrapper[4852]: W1201 20:44:09.231254 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaaf7fe95_5448_404e_b2f4_7bac25b267db.slice/crio-838598518ccb1d8b64d9e8d9cc52b5e3df1bc879d06da659e590739fd536b6c1 WatchSource:0}: Error finding container 838598518ccb1d8b64d9e8d9cc52b5e3df1bc879d06da659e590739fd536b6c1: Status 404 returned error can't find the container with id 838598518ccb1d8b64d9e8d9cc52b5e3df1bc879d06da659e590739fd536b6c1 Dec 01 20:44:09 crc kubenswrapper[4852]: I1201 20:44:09.234382 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:44:09 crc kubenswrapper[4852]: I1201 20:44:09.275116 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" event={"ID":"aaf7fe95-5448-404e-b2f4-7bac25b267db","Type":"ContainerStarted","Data":"838598518ccb1d8b64d9e8d9cc52b5e3df1bc879d06da659e590739fd536b6c1"} Dec 01 20:44:09 crc kubenswrapper[4852]: I1201 20:44:09.320044 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:44:09 crc kubenswrapper[4852]: E1201 20:44:09.320346 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:44:11 crc kubenswrapper[4852]: I1201 20:44:11.296276 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" event={"ID":"aaf7fe95-5448-404e-b2f4-7bac25b267db","Type":"ContainerStarted","Data":"d3c9d69a3b30d0f6264bc927b3d15328ff58fd406b9a6fd29be3b644dda867e9"} Dec 01 20:44:11 crc kubenswrapper[4852]: I1201 20:44:11.319544 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" podStartSLOduration=2.47509454 podStartE2EDuration="3.319526375s" podCreationTimestamp="2025-12-01 20:44:08 +0000 UTC" firstStartedPulling="2025-12-01 20:44:09.234101545 +0000 UTC m=+2369.161182972" lastFinishedPulling="2025-12-01 20:44:10.07853339 +0000 UTC m=+2370.005614807" observedRunningTime="2025-12-01 20:44:11.312412273 +0000 UTC m=+2371.239493710" watchObservedRunningTime="2025-12-01 20:44:11.319526375 +0000 UTC m=+2371.246607802" Dec 01 20:44:20 crc kubenswrapper[4852]: I1201 20:44:20.329202 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:44:20 crc kubenswrapper[4852]: E1201 20:44:20.330211 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:44:32 crc kubenswrapper[4852]: I1201 20:44:32.320081 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:44:32 crc kubenswrapper[4852]: E1201 20:44:32.320901 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:44:45 crc kubenswrapper[4852]: I1201 20:44:45.320736 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:44:45 crc kubenswrapper[4852]: E1201 20:44:45.321677 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:44:58 crc kubenswrapper[4852]: I1201 20:44:58.320441 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:44:58 crc kubenswrapper[4852]: E1201 20:44:58.321111 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.149293 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79"] Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.151201 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.153560 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.157991 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79"] Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.159277 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.222070 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e565a3fb-99ce-4d23-a1be-5038590f3df4-config-volume\") pod \"collect-profiles-29410365-27s79\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.222254 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e565a3fb-99ce-4d23-a1be-5038590f3df4-secret-volume\") pod \"collect-profiles-29410365-27s79\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.222396 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hk29h\" (UniqueName: \"kubernetes.io/projected/e565a3fb-99ce-4d23-a1be-5038590f3df4-kube-api-access-hk29h\") pod \"collect-profiles-29410365-27s79\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.324622 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e565a3fb-99ce-4d23-a1be-5038590f3df4-secret-volume\") pod \"collect-profiles-29410365-27s79\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.324783 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hk29h\" (UniqueName: \"kubernetes.io/projected/e565a3fb-99ce-4d23-a1be-5038590f3df4-kube-api-access-hk29h\") pod \"collect-profiles-29410365-27s79\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.324855 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e565a3fb-99ce-4d23-a1be-5038590f3df4-config-volume\") pod \"collect-profiles-29410365-27s79\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.326325 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e565a3fb-99ce-4d23-a1be-5038590f3df4-config-volume\") pod 
\"collect-profiles-29410365-27s79\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.342043 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e565a3fb-99ce-4d23-a1be-5038590f3df4-secret-volume\") pod \"collect-profiles-29410365-27s79\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.344128 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hk29h\" (UniqueName: \"kubernetes.io/projected/e565a3fb-99ce-4d23-a1be-5038590f3df4-kube-api-access-hk29h\") pod \"collect-profiles-29410365-27s79\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.477238 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:00 crc kubenswrapper[4852]: I1201 20:45:00.916053 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79"] Dec 01 20:45:01 crc kubenswrapper[4852]: I1201 20:45:01.789079 4852 generic.go:334] "Generic (PLEG): container finished" podID="e565a3fb-99ce-4d23-a1be-5038590f3df4" containerID="fd127d37f6fa29bd2d1bea35e31140e292972dfb04e8bb02889d6197dd27913c" exitCode=0 Dec 01 20:45:01 crc kubenswrapper[4852]: I1201 20:45:01.789685 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" event={"ID":"e565a3fb-99ce-4d23-a1be-5038590f3df4","Type":"ContainerDied","Data":"fd127d37f6fa29bd2d1bea35e31140e292972dfb04e8bb02889d6197dd27913c"} Dec 01 20:45:01 crc kubenswrapper[4852]: I1201 20:45:01.789710 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" event={"ID":"e565a3fb-99ce-4d23-a1be-5038590f3df4","Type":"ContainerStarted","Data":"af6b7f26f71b797f6a493ca1e0cd2673556b0bf447f8985a149e5f1018df89c8"} Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.168164 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.276390 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e565a3fb-99ce-4d23-a1be-5038590f3df4-config-volume\") pod \"e565a3fb-99ce-4d23-a1be-5038590f3df4\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.276467 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hk29h\" (UniqueName: \"kubernetes.io/projected/e565a3fb-99ce-4d23-a1be-5038590f3df4-kube-api-access-hk29h\") pod \"e565a3fb-99ce-4d23-a1be-5038590f3df4\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.276644 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e565a3fb-99ce-4d23-a1be-5038590f3df4-secret-volume\") pod \"e565a3fb-99ce-4d23-a1be-5038590f3df4\" (UID: \"e565a3fb-99ce-4d23-a1be-5038590f3df4\") " Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.277139 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e565a3fb-99ce-4d23-a1be-5038590f3df4-config-volume" (OuterVolumeSpecName: "config-volume") pod "e565a3fb-99ce-4d23-a1be-5038590f3df4" (UID: "e565a3fb-99ce-4d23-a1be-5038590f3df4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.281936 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e565a3fb-99ce-4d23-a1be-5038590f3df4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e565a3fb-99ce-4d23-a1be-5038590f3df4" (UID: "e565a3fb-99ce-4d23-a1be-5038590f3df4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.282152 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e565a3fb-99ce-4d23-a1be-5038590f3df4-kube-api-access-hk29h" (OuterVolumeSpecName: "kube-api-access-hk29h") pod "e565a3fb-99ce-4d23-a1be-5038590f3df4" (UID: "e565a3fb-99ce-4d23-a1be-5038590f3df4"). InnerVolumeSpecName "kube-api-access-hk29h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.379165 4852 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e565a3fb-99ce-4d23-a1be-5038590f3df4-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.379398 4852 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e565a3fb-99ce-4d23-a1be-5038590f3df4-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.379476 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hk29h\" (UniqueName: \"kubernetes.io/projected/e565a3fb-99ce-4d23-a1be-5038590f3df4-kube-api-access-hk29h\") on node \"crc\" DevicePath \"\"" Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.806601 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" event={"ID":"e565a3fb-99ce-4d23-a1be-5038590f3df4","Type":"ContainerDied","Data":"af6b7f26f71b797f6a493ca1e0cd2673556b0bf447f8985a149e5f1018df89c8"} Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.806929 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af6b7f26f71b797f6a493ca1e0cd2673556b0bf447f8985a149e5f1018df89c8" Dec 01 20:45:03 crc kubenswrapper[4852]: I1201 20:45:03.806651 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-27s79" Dec 01 20:45:04 crc kubenswrapper[4852]: I1201 20:45:04.239825 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd"] Dec 01 20:45:04 crc kubenswrapper[4852]: I1201 20:45:04.248128 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-lm4qd"] Dec 01 20:45:04 crc kubenswrapper[4852]: I1201 20:45:04.333222 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7b65e8c-0f7e-441d-9183-2090247908eb" path="/var/lib/kubelet/pods/b7b65e8c-0f7e-441d-9183-2090247908eb/volumes" Dec 01 20:45:13 crc kubenswrapper[4852]: I1201 20:45:13.320934 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:45:13 crc kubenswrapper[4852]: E1201 20:45:13.321840 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:45:26 crc kubenswrapper[4852]: I1201 20:45:26.320303 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:45:26 crc kubenswrapper[4852]: E1201 20:45:26.321322 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:45:40 crc kubenswrapper[4852]: I1201 20:45:40.336562 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:45:40 crc kubenswrapper[4852]: E1201 20:45:40.337644 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:45:55 crc kubenswrapper[4852]: I1201 20:45:55.322140 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:45:55 crc kubenswrapper[4852]: E1201 20:45:55.322771 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:45:57 crc kubenswrapper[4852]: I1201 20:45:57.599568 4852 scope.go:117] "RemoveContainer" containerID="74faa56205cc80d24ee9cf4e3670a5d3848e0467f1fc9fb0e4df69104019b683" Dec 01 20:46:08 crc kubenswrapper[4852]: I1201 20:46:08.320325 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:46:08 crc kubenswrapper[4852]: E1201 20:46:08.321161 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:46:21 crc kubenswrapper[4852]: I1201 20:46:21.319605 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:46:21 crc kubenswrapper[4852]: E1201 20:46:21.321242 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:46:33 crc kubenswrapper[4852]: I1201 20:46:33.320421 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:46:33 crc kubenswrapper[4852]: E1201 20:46:33.321243 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:46:44 crc kubenswrapper[4852]: I1201 20:46:44.320520 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:46:44 crc kubenswrapper[4852]: E1201 20:46:44.321213 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:46:58 crc kubenswrapper[4852]: I1201 20:46:58.321527 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:46:58 crc kubenswrapper[4852]: E1201 20:46:58.322841 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:46:59 crc kubenswrapper[4852]: I1201 20:46:59.929453 4852 generic.go:334] "Generic (PLEG): container finished" podID="aaf7fe95-5448-404e-b2f4-7bac25b267db" containerID="d3c9d69a3b30d0f6264bc927b3d15328ff58fd406b9a6fd29be3b644dda867e9" exitCode=0 Dec 01 20:46:59 crc kubenswrapper[4852]: I1201 20:46:59.929515 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" event={"ID":"aaf7fe95-5448-404e-b2f4-7bac25b267db","Type":"ContainerDied","Data":"d3c9d69a3b30d0f6264bc927b3d15328ff58fd406b9a6fd29be3b644dda867e9"} Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.370208 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.439253 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-0\") pod \"aaf7fe95-5448-404e-b2f4-7bac25b267db\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.439362 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-1\") pod \"aaf7fe95-5448-404e-b2f4-7bac25b267db\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.439492 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-combined-ca-bundle\") pod \"aaf7fe95-5448-404e-b2f4-7bac25b267db\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.439590 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-ssh-key\") pod \"aaf7fe95-5448-404e-b2f4-7bac25b267db\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.439647 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-extra-config-0\") pod \"aaf7fe95-5448-404e-b2f4-7bac25b267db\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.439697 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j9rc\" (UniqueName: \"kubernetes.io/projected/aaf7fe95-5448-404e-b2f4-7bac25b267db-kube-api-access-8j9rc\") pod \"aaf7fe95-5448-404e-b2f4-7bac25b267db\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.439733 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-0\") pod \"aaf7fe95-5448-404e-b2f4-7bac25b267db\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.439869 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-inventory\") pod \"aaf7fe95-5448-404e-b2f4-7bac25b267db\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.439929 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-1\") pod \"aaf7fe95-5448-404e-b2f4-7bac25b267db\" (UID: \"aaf7fe95-5448-404e-b2f4-7bac25b267db\") " Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.445438 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "aaf7fe95-5448-404e-b2f4-7bac25b267db" (UID: "aaf7fe95-5448-404e-b2f4-7bac25b267db"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.452235 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaf7fe95-5448-404e-b2f4-7bac25b267db-kube-api-access-8j9rc" (OuterVolumeSpecName: "kube-api-access-8j9rc") pod "aaf7fe95-5448-404e-b2f4-7bac25b267db" (UID: "aaf7fe95-5448-404e-b2f4-7bac25b267db"). InnerVolumeSpecName "kube-api-access-8j9rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.472102 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "aaf7fe95-5448-404e-b2f4-7bac25b267db" (UID: "aaf7fe95-5448-404e-b2f4-7bac25b267db"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.477987 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "aaf7fe95-5448-404e-b2f4-7bac25b267db" (UID: "aaf7fe95-5448-404e-b2f4-7bac25b267db"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.482239 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "aaf7fe95-5448-404e-b2f4-7bac25b267db" (UID: "aaf7fe95-5448-404e-b2f4-7bac25b267db"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.491548 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "aaf7fe95-5448-404e-b2f4-7bac25b267db" (UID: "aaf7fe95-5448-404e-b2f4-7bac25b267db"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.494640 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aaf7fe95-5448-404e-b2f4-7bac25b267db" (UID: "aaf7fe95-5448-404e-b2f4-7bac25b267db"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.495746 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "aaf7fe95-5448-404e-b2f4-7bac25b267db" (UID: "aaf7fe95-5448-404e-b2f4-7bac25b267db"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.503132 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-inventory" (OuterVolumeSpecName: "inventory") pod "aaf7fe95-5448-404e-b2f4-7bac25b267db" (UID: "aaf7fe95-5448-404e-b2f4-7bac25b267db"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.543519 4852 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.543575 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.543585 4852 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.543595 4852 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.543606 4852 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.543617 4852 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.543633 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aaf7fe95-5448-404e-b2f4-7bac25b267db-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.543643 4852 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/aaf7fe95-5448-404e-b2f4-7bac25b267db-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.543653 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j9rc\" (UniqueName: \"kubernetes.io/projected/aaf7fe95-5448-404e-b2f4-7bac25b267db-kube-api-access-8j9rc\") on node \"crc\" DevicePath \"\"" Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.952016 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" event={"ID":"aaf7fe95-5448-404e-b2f4-7bac25b267db","Type":"ContainerDied","Data":"838598518ccb1d8b64d9e8d9cc52b5e3df1bc879d06da659e590739fd536b6c1"} Dec 01 20:47:01 crc kubenswrapper[4852]: I1201 20:47:01.952120 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="838598518ccb1d8b64d9e8d9cc52b5e3df1bc879d06da659e590739fd536b6c1" Dec 01 20:47:01 crc 
kubenswrapper[4852]: I1201 20:47:01.952081 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9669b" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.093549 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx"] Dec 01 20:47:02 crc kubenswrapper[4852]: E1201 20:47:02.094957 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf7fe95-5448-404e-b2f4-7bac25b267db" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.095004 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf7fe95-5448-404e-b2f4-7bac25b267db" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 20:47:02 crc kubenswrapper[4852]: E1201 20:47:02.095026 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e565a3fb-99ce-4d23-a1be-5038590f3df4" containerName="collect-profiles" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.095037 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="e565a3fb-99ce-4d23-a1be-5038590f3df4" containerName="collect-profiles" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.095545 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="e565a3fb-99ce-4d23-a1be-5038590f3df4" containerName="collect-profiles" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.095581 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaf7fe95-5448-404e-b2f4-7bac25b267db" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.096684 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.107655 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.108226 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.108415 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mbv9v" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.108589 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.108601 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.117540 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx"] Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.258515 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.258607 4852 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.258676 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.258915 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.258956 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55kmq\" (UniqueName: \"kubernetes.io/projected/59dae619-1f63-4b50-84ed-037a15a55876-kube-api-access-55kmq\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.259213 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.259274 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.361492 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.361636 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.361665 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55kmq\" (UniqueName: \"kubernetes.io/projected/59dae619-1f63-4b50-84ed-037a15a55876-kube-api-access-55kmq\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.361723 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.361747 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.361829 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.361864 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.365766 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.366385 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.367188 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.370898 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.376714 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.377202 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.380784 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55kmq\" (UniqueName: \"kubernetes.io/projected/59dae619-1f63-4b50-84ed-037a15a55876-kube-api-access-55kmq\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:02 crc kubenswrapper[4852]: I1201 20:47:02.437104 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:47:03 crc kubenswrapper[4852]: I1201 20:47:03.040245 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx"] Dec 01 20:47:03 crc kubenswrapper[4852]: I1201 20:47:03.977418 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" event={"ID":"59dae619-1f63-4b50-84ed-037a15a55876","Type":"ContainerStarted","Data":"f6fa9ba5628907fb98db7936ce7e26016931f5bc14105b59e66766765aac35a1"} Dec 01 20:47:03 crc kubenswrapper[4852]: I1201 20:47:03.977738 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" event={"ID":"59dae619-1f63-4b50-84ed-037a15a55876","Type":"ContainerStarted","Data":"69e4ef1cf70c83ca196e2143c3111c5396e0efbcec06854a68bedde6e48e53c7"} Dec 01 20:47:04 crc kubenswrapper[4852]: I1201 20:47:04.011616 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" podStartSLOduration=1.397052883 podStartE2EDuration="2.011597485s" podCreationTimestamp="2025-12-01 20:47:02 +0000 UTC" firstStartedPulling="2025-12-01 20:47:03.044822702 +0000 UTC m=+2542.971904119" lastFinishedPulling="2025-12-01 20:47:03.659367284 +0000 UTC m=+2543.586448721" observedRunningTime="2025-12-01 20:47:04.001662735 +0000 UTC m=+2543.928744192" watchObservedRunningTime="2025-12-01 20:47:04.011597485 +0000 UTC m=+2543.938678922" Dec 01 20:47:13 crc kubenswrapper[4852]: I1201 20:47:13.319967 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:47:13 crc kubenswrapper[4852]: E1201 20:47:13.320669 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:47:24 crc kubenswrapper[4852]: I1201 20:47:24.320395 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:47:25 crc kubenswrapper[4852]: I1201 20:47:25.171076 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"615260a2c9cfb8d4223541759178b5521af20ae92f1fdea09ffdfd136377ab8b"} Dec 01 20:49:25 crc kubenswrapper[4852]: I1201 20:49:25.323744 4852 generic.go:334] "Generic (PLEG): container finished" podID="59dae619-1f63-4b50-84ed-037a15a55876" containerID="f6fa9ba5628907fb98db7936ce7e26016931f5bc14105b59e66766765aac35a1" exitCode=0 Dec 01 20:49:25 crc kubenswrapper[4852]: I1201 20:49:25.323890 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" event={"ID":"59dae619-1f63-4b50-84ed-037a15a55876","Type":"ContainerDied","Data":"f6fa9ba5628907fb98db7936ce7e26016931f5bc14105b59e66766765aac35a1"} Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.726588 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.868636 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55kmq\" (UniqueName: \"kubernetes.io/projected/59dae619-1f63-4b50-84ed-037a15a55876-kube-api-access-55kmq\") pod \"59dae619-1f63-4b50-84ed-037a15a55876\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.868680 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ssh-key\") pod \"59dae619-1f63-4b50-84ed-037a15a55876\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.868731 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-2\") pod \"59dae619-1f63-4b50-84ed-037a15a55876\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.868827 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-inventory\") pod \"59dae619-1f63-4b50-84ed-037a15a55876\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.868870 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-0\") pod \"59dae619-1f63-4b50-84ed-037a15a55876\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.868896 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-telemetry-combined-ca-bundle\") pod \"59dae619-1f63-4b50-84ed-037a15a55876\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.868929 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-1\") pod \"59dae619-1f63-4b50-84ed-037a15a55876\" (UID: \"59dae619-1f63-4b50-84ed-037a15a55876\") " Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.876689 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "59dae619-1f63-4b50-84ed-037a15a55876" (UID: "59dae619-1f63-4b50-84ed-037a15a55876"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.883925 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59dae619-1f63-4b50-84ed-037a15a55876-kube-api-access-55kmq" (OuterVolumeSpecName: "kube-api-access-55kmq") pod "59dae619-1f63-4b50-84ed-037a15a55876" (UID: "59dae619-1f63-4b50-84ed-037a15a55876"). 
InnerVolumeSpecName "kube-api-access-55kmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.899768 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "59dae619-1f63-4b50-84ed-037a15a55876" (UID: "59dae619-1f63-4b50-84ed-037a15a55876"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.902006 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "59dae619-1f63-4b50-84ed-037a15a55876" (UID: "59dae619-1f63-4b50-84ed-037a15a55876"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.902862 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "59dae619-1f63-4b50-84ed-037a15a55876" (UID: "59dae619-1f63-4b50-84ed-037a15a55876"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.904136 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-inventory" (OuterVolumeSpecName: "inventory") pod "59dae619-1f63-4b50-84ed-037a15a55876" (UID: "59dae619-1f63-4b50-84ed-037a15a55876"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.910202 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "59dae619-1f63-4b50-84ed-037a15a55876" (UID: "59dae619-1f63-4b50-84ed-037a15a55876"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.972444 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55kmq\" (UniqueName: \"kubernetes.io/projected/59dae619-1f63-4b50-84ed-037a15a55876-kube-api-access-55kmq\") on node \"crc\" DevicePath \"\"" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.972535 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.972603 4852 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.972624 4852 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.972684 4852 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.972705 4852 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:49:26 crc kubenswrapper[4852]: I1201 20:49:26.972766 4852 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/59dae619-1f63-4b50-84ed-037a15a55876-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 01 20:49:27 crc kubenswrapper[4852]: I1201 20:49:27.346979 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" event={"ID":"59dae619-1f63-4b50-84ed-037a15a55876","Type":"ContainerDied","Data":"69e4ef1cf70c83ca196e2143c3111c5396e0efbcec06854a68bedde6e48e53c7"} Dec 01 20:49:27 crc kubenswrapper[4852]: I1201 20:49:27.347023 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69e4ef1cf70c83ca196e2143c3111c5396e0efbcec06854a68bedde6e48e53c7" Dec 01 20:49:27 crc kubenswrapper[4852]: I1201 20:49:27.347053 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.754929 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pq9gb"] Dec 01 20:49:46 crc kubenswrapper[4852]: E1201 20:49:46.755952 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59dae619-1f63-4b50-84ed-037a15a55876" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.755972 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="59dae619-1f63-4b50-84ed-037a15a55876" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.756201 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="59dae619-1f63-4b50-84ed-037a15a55876" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.757840 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.768300 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pq9gb"] Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.859514 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-catalog-content\") pod \"redhat-marketplace-pq9gb\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.859587 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-th4p5\" (UniqueName: \"kubernetes.io/projected/41c90b2b-61cd-4397-83d3-0151354af622-kube-api-access-th4p5\") pod \"redhat-marketplace-pq9gb\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.859646 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-utilities\") pod \"redhat-marketplace-pq9gb\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.961509 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-catalog-content\") pod \"redhat-marketplace-pq9gb\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.961565 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-th4p5\" (UniqueName: \"kubernetes.io/projected/41c90b2b-61cd-4397-83d3-0151354af622-kube-api-access-th4p5\") pod \"redhat-marketplace-pq9gb\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.961595 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-utilities\") pod \"redhat-marketplace-pq9gb\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.962199 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-catalog-content\") pod \"redhat-marketplace-pq9gb\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.962250 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-utilities\") pod \"redhat-marketplace-pq9gb\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:46 crc kubenswrapper[4852]: I1201 20:49:46.989352 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-th4p5\" (UniqueName: \"kubernetes.io/projected/41c90b2b-61cd-4397-83d3-0151354af622-kube-api-access-th4p5\") pod \"redhat-marketplace-pq9gb\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:47 crc kubenswrapper[4852]: I1201 20:49:47.095266 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:47 crc kubenswrapper[4852]: I1201 20:49:47.580131 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pq9gb"] Dec 01 20:49:48 crc kubenswrapper[4852]: I1201 20:49:48.580534 4852 generic.go:334] "Generic (PLEG): container finished" podID="41c90b2b-61cd-4397-83d3-0151354af622" containerID="b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124" exitCode=0 Dec 01 20:49:48 crc kubenswrapper[4852]: I1201 20:49:48.580625 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq9gb" event={"ID":"41c90b2b-61cd-4397-83d3-0151354af622","Type":"ContainerDied","Data":"b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124"} Dec 01 20:49:48 crc kubenswrapper[4852]: I1201 20:49:48.580835 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq9gb" event={"ID":"41c90b2b-61cd-4397-83d3-0151354af622","Type":"ContainerStarted","Data":"62cf46387e2ecfe89093f279fd8a0aecc187b4f6be8cc2edf52db53e5d9c97b7"} Dec 01 20:49:48 crc kubenswrapper[4852]: I1201 20:49:48.583914 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:49:49 crc kubenswrapper[4852]: I1201 20:49:49.590537 4852 generic.go:334] "Generic (PLEG): container finished" podID="41c90b2b-61cd-4397-83d3-0151354af622" containerID="f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed" exitCode=0 Dec 01 20:49:49 crc kubenswrapper[4852]: I1201 20:49:49.590635 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq9gb" event={"ID":"41c90b2b-61cd-4397-83d3-0151354af622","Type":"ContainerDied","Data":"f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed"} Dec 01 20:49:50 crc kubenswrapper[4852]: I1201 20:49:50.229242 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:49:50 crc kubenswrapper[4852]: I1201 20:49:50.229317 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:49:50 crc kubenswrapper[4852]: I1201 20:49:50.612264 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq9gb" event={"ID":"41c90b2b-61cd-4397-83d3-0151354af622","Type":"ContainerStarted","Data":"883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e"} Dec 01 20:49:50 crc kubenswrapper[4852]: I1201 20:49:50.634969 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pq9gb" podStartSLOduration=3.080075794 podStartE2EDuration="4.634949518s" podCreationTimestamp="2025-12-01 20:49:46 +0000 UTC" firstStartedPulling="2025-12-01 20:49:48.583274072 +0000 UTC m=+2708.510355489" lastFinishedPulling="2025-12-01 20:49:50.138147796 +0000 UTC m=+2710.065229213" observedRunningTime="2025-12-01 20:49:50.626574907 +0000 UTC m=+2710.553656324" watchObservedRunningTime="2025-12-01 20:49:50.634949518 +0000 UTC m=+2710.562030935" Dec 01 20:49:57 crc kubenswrapper[4852]: I1201 20:49:57.095648 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:57 crc kubenswrapper[4852]: I1201 20:49:57.097072 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:57 crc kubenswrapper[4852]: I1201 20:49:57.141504 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:57 crc kubenswrapper[4852]: I1201 20:49:57.758167 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:49:57 crc kubenswrapper[4852]: I1201 20:49:57.811567 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pq9gb"] Dec 01 20:49:59 crc kubenswrapper[4852]: I1201 20:49:59.701096 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pq9gb" podUID="41c90b2b-61cd-4397-83d3-0151354af622" containerName="registry-server" containerID="cri-o://883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e" gracePeriod=2 Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.203630 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.331216 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-utilities\") pod \"41c90b2b-61cd-4397-83d3-0151354af622\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.331368 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-catalog-content\") pod \"41c90b2b-61cd-4397-83d3-0151354af622\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.331437 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-th4p5\" (UniqueName: \"kubernetes.io/projected/41c90b2b-61cd-4397-83d3-0151354af622-kube-api-access-th4p5\") pod \"41c90b2b-61cd-4397-83d3-0151354af622\" (UID: \"41c90b2b-61cd-4397-83d3-0151354af622\") " Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.332424 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-utilities" (OuterVolumeSpecName: "utilities") pod "41c90b2b-61cd-4397-83d3-0151354af622" (UID: "41c90b2b-61cd-4397-83d3-0151354af622"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.341776 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41c90b2b-61cd-4397-83d3-0151354af622-kube-api-access-th4p5" (OuterVolumeSpecName: "kube-api-access-th4p5") pod "41c90b2b-61cd-4397-83d3-0151354af622" (UID: "41c90b2b-61cd-4397-83d3-0151354af622"). InnerVolumeSpecName "kube-api-access-th4p5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.363014 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41c90b2b-61cd-4397-83d3-0151354af622" (UID: "41c90b2b-61cd-4397-83d3-0151354af622"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.434114 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.434412 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41c90b2b-61cd-4397-83d3-0151354af622-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.434427 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-th4p5\" (UniqueName: \"kubernetes.io/projected/41c90b2b-61cd-4397-83d3-0151354af622-kube-api-access-th4p5\") on node \"crc\" DevicePath \"\"" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.711890 4852 generic.go:334] "Generic (PLEG): container finished" podID="41c90b2b-61cd-4397-83d3-0151354af622" containerID="883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e" exitCode=0 Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.711936 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq9gb" event={"ID":"41c90b2b-61cd-4397-83d3-0151354af622","Type":"ContainerDied","Data":"883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e"} Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.711960 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq9gb" event={"ID":"41c90b2b-61cd-4397-83d3-0151354af622","Type":"ContainerDied","Data":"62cf46387e2ecfe89093f279fd8a0aecc187b4f6be8cc2edf52db53e5d9c97b7"} Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.711939 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pq9gb" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.711977 4852 scope.go:117] "RemoveContainer" containerID="883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.731604 4852 scope.go:117] "RemoveContainer" containerID="f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.761145 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pq9gb"] Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.770132 4852 scope.go:117] "RemoveContainer" containerID="b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.773123 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pq9gb"] Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.828339 4852 scope.go:117] "RemoveContainer" containerID="883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e" Dec 01 20:50:00 crc kubenswrapper[4852]: E1201 20:50:00.828828 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e\": container with ID starting with 883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e not found: ID does not exist" containerID="883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.828868 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e"} err="failed to get container status \"883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e\": rpc error: code = NotFound desc = could not find container \"883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e\": container with ID starting with 883335f4e933ac3e38d0c6cf5803326bf9b947ca7528d6cd72450b124423f55e not found: ID does not exist" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.828896 4852 scope.go:117] "RemoveContainer" containerID="f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed" Dec 01 20:50:00 crc kubenswrapper[4852]: E1201 20:50:00.829156 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed\": container with ID starting with f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed not found: ID does not exist" containerID="f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.829187 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed"} err="failed to get container status \"f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed\": rpc error: code = NotFound desc = could not find container \"f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed\": container with ID starting with f9b7a1c672e663bc5e982691a6c015b6414e067b6b7b897902dab7c39ec21eed not found: ID does not exist" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.829207 4852 scope.go:117] "RemoveContainer" 
containerID="b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124" Dec 01 20:50:00 crc kubenswrapper[4852]: E1201 20:50:00.829548 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124\": container with ID starting with b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124 not found: ID does not exist" containerID="b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124" Dec 01 20:50:00 crc kubenswrapper[4852]: I1201 20:50:00.829601 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124"} err="failed to get container status \"b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124\": rpc error: code = NotFound desc = could not find container \"b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124\": container with ID starting with b517a506622f7ad932a9fd31e80757bfba4d6c9a0204298884f0de164b4ef124 not found: ID does not exist" Dec 01 20:50:02 crc kubenswrapper[4852]: I1201 20:50:02.331352 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41c90b2b-61cd-4397-83d3-0151354af622" path="/var/lib/kubelet/pods/41c90b2b-61cd-4397-83d3-0151354af622/volumes" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.210835 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 20:50:18 crc kubenswrapper[4852]: E1201 20:50:18.211747 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c90b2b-61cd-4397-83d3-0151354af622" containerName="registry-server" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.211760 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c90b2b-61cd-4397-83d3-0151354af622" containerName="registry-server" Dec 01 20:50:18 crc kubenswrapper[4852]: E1201 20:50:18.211777 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c90b2b-61cd-4397-83d3-0151354af622" containerName="extract-utilities" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.211785 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c90b2b-61cd-4397-83d3-0151354af622" containerName="extract-utilities" Dec 01 20:50:18 crc kubenswrapper[4852]: E1201 20:50:18.211803 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c90b2b-61cd-4397-83d3-0151354af622" containerName="extract-content" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.211811 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c90b2b-61cd-4397-83d3-0151354af622" containerName="extract-content" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.212021 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="41c90b2b-61cd-4397-83d3-0151354af622" containerName="registry-server" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.212697 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.216251 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.216411 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.216557 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.216585 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-wxs6t" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.223320 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.269873 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.269996 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-config-data\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.270026 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.270065 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.270100 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.270145 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8t76\" (UniqueName: \"kubernetes.io/projected/db1dc4fa-69a3-4c29-b69b-f6080f275e97-kube-api-access-f8t76\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.270226 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: 
\"kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.270253 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.270281 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.372307 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.372898 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8t76\" (UniqueName: \"kubernetes.io/projected/db1dc4fa-69a3-4c29-b69b-f6080f275e97-kube-api-access-f8t76\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.372947 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.372981 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.373013 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.373169 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.373286 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-config-data\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.373315 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.373362 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.373712 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.374939 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.375686 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.375710 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.376242 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-config-data\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.385642 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.387962 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ca-certs\") pod \"tempest-tests-tempest\" (UID: 
\"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.391026 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8t76\" (UniqueName: \"kubernetes.io/projected/db1dc4fa-69a3-4c29-b69b-f6080f275e97-kube-api-access-f8t76\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.393816 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.414478 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.540348 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 20:50:18 crc kubenswrapper[4852]: I1201 20:50:18.998220 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 20:50:19 crc kubenswrapper[4852]: I1201 20:50:19.918096 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"db1dc4fa-69a3-4c29-b69b-f6080f275e97","Type":"ContainerStarted","Data":"7bbdfcbaa1a10bcb64c78f45b7e529db74bea84e25a43efd3dec6086a8297447"} Dec 01 20:50:20 crc kubenswrapper[4852]: I1201 20:50:20.229191 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:50:20 crc kubenswrapper[4852]: I1201 20:50:20.229619 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:50:50 crc kubenswrapper[4852]: I1201 20:50:50.230172 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:50:50 crc kubenswrapper[4852]: I1201 20:50:50.230833 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:50:50 crc kubenswrapper[4852]: I1201 20:50:50.230878 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:50:50 crc 
kubenswrapper[4852]: I1201 20:50:50.231723 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"615260a2c9cfb8d4223541759178b5521af20ae92f1fdea09ffdfd136377ab8b"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:50:50 crc kubenswrapper[4852]: I1201 20:50:50.231789 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://615260a2c9cfb8d4223541759178b5521af20ae92f1fdea09ffdfd136377ab8b" gracePeriod=600 Dec 01 20:50:51 crc kubenswrapper[4852]: E1201 20:50:51.691642 4852 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 01 20:50:51 crc kubenswrapper[4852]: E1201 20:50:51.692294 4852 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f8t76,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:ni
l,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(db1dc4fa-69a3-4c29-b69b-f6080f275e97): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:50:51 crc kubenswrapper[4852]: E1201 20:50:51.693850 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="db1dc4fa-69a3-4c29-b69b-f6080f275e97" Dec 01 20:50:52 crc kubenswrapper[4852]: I1201 20:50:52.248253 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="615260a2c9cfb8d4223541759178b5521af20ae92f1fdea09ffdfd136377ab8b" exitCode=0 Dec 01 20:50:52 crc kubenswrapper[4852]: I1201 20:50:52.248812 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"615260a2c9cfb8d4223541759178b5521af20ae92f1fdea09ffdfd136377ab8b"} Dec 01 20:50:52 crc kubenswrapper[4852]: I1201 20:50:52.248844 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"} Dec 01 20:50:52 crc kubenswrapper[4852]: I1201 20:50:52.248867 4852 scope.go:117] "RemoveContainer" containerID="634ab0ded60f743f04b8a7da3845c4fb6272b0e1c80560cc33e04cf744a74a47" Dec 01 20:50:52 crc kubenswrapper[4852]: E1201 20:50:52.250412 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="db1dc4fa-69a3-4c29-b69b-f6080f275e97" Dec 01 20:50:57 crc kubenswrapper[4852]: I1201 20:50:57.879432 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8g444"] Dec 01 20:50:57 crc kubenswrapper[4852]: I1201 20:50:57.883015 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:57 crc kubenswrapper[4852]: I1201 20:50:57.889489 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8g444"] Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.053260 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-utilities\") pod \"certified-operators-8g444\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.053397 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwjt4\" (UniqueName: \"kubernetes.io/projected/bfb75e7c-ce15-4500-9d50-ae12d611fe16-kube-api-access-kwjt4\") pod \"certified-operators-8g444\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.053516 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-catalog-content\") pod \"certified-operators-8g444\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.155568 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-utilities\") pod \"certified-operators-8g444\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.155712 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwjt4\" (UniqueName: \"kubernetes.io/projected/bfb75e7c-ce15-4500-9d50-ae12d611fe16-kube-api-access-kwjt4\") pod \"certified-operators-8g444\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.155781 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-catalog-content\") pod \"certified-operators-8g444\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.156232 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-utilities\") pod \"certified-operators-8g444\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.156599 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-catalog-content\") pod \"certified-operators-8g444\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.182601 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kwjt4\" (UniqueName: \"kubernetes.io/projected/bfb75e7c-ce15-4500-9d50-ae12d611fe16-kube-api-access-kwjt4\") pod \"certified-operators-8g444\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.239237 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:50:58 crc kubenswrapper[4852]: I1201 20:50:58.728422 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8g444"] Dec 01 20:50:59 crc kubenswrapper[4852]: I1201 20:50:59.340177 4852 generic.go:334] "Generic (PLEG): container finished" podID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerID="9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0" exitCode=0 Dec 01 20:50:59 crc kubenswrapper[4852]: I1201 20:50:59.340250 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8g444" event={"ID":"bfb75e7c-ce15-4500-9d50-ae12d611fe16","Type":"ContainerDied","Data":"9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0"} Dec 01 20:50:59 crc kubenswrapper[4852]: I1201 20:50:59.340297 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8g444" event={"ID":"bfb75e7c-ce15-4500-9d50-ae12d611fe16","Type":"ContainerStarted","Data":"630abc41e1c23308f54a3a87d298c8a201545a4f817f740b7a69fa218176028e"} Dec 01 20:51:00 crc kubenswrapper[4852]: I1201 20:51:00.360480 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8g444" event={"ID":"bfb75e7c-ce15-4500-9d50-ae12d611fe16","Type":"ContainerStarted","Data":"8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b"} Dec 01 20:51:01 crc kubenswrapper[4852]: I1201 20:51:01.399198 4852 generic.go:334] "Generic (PLEG): container finished" podID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerID="8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b" exitCode=0 Dec 01 20:51:01 crc kubenswrapper[4852]: I1201 20:51:01.399260 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8g444" event={"ID":"bfb75e7c-ce15-4500-9d50-ae12d611fe16","Type":"ContainerDied","Data":"8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b"} Dec 01 20:51:02 crc kubenswrapper[4852]: I1201 20:51:02.411561 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8g444" event={"ID":"bfb75e7c-ce15-4500-9d50-ae12d611fe16","Type":"ContainerStarted","Data":"84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1"} Dec 01 20:51:02 crc kubenswrapper[4852]: I1201 20:51:02.434644 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8g444" podStartSLOduration=2.967061729 podStartE2EDuration="5.434625762s" podCreationTimestamp="2025-12-01 20:50:57 +0000 UTC" firstStartedPulling="2025-12-01 20:50:59.344060675 +0000 UTC m=+2779.271142092" lastFinishedPulling="2025-12-01 20:51:01.811624678 +0000 UTC m=+2781.738706125" observedRunningTime="2025-12-01 20:51:02.431306179 +0000 UTC m=+2782.358387596" watchObservedRunningTime="2025-12-01 20:51:02.434625762 +0000 UTC m=+2782.361707179" Dec 01 20:51:07 crc kubenswrapper[4852]: I1201 20:51:07.683062 4852 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 01 20:51:08 crc kubenswrapper[4852]: I1201 20:51:08.240382 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:51:08 crc kubenswrapper[4852]: I1201 20:51:08.240752 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:51:08 crc kubenswrapper[4852]: I1201 20:51:08.286549 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:51:08 crc kubenswrapper[4852]: I1201 20:51:08.527664 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:51:08 crc kubenswrapper[4852]: I1201 20:51:08.574343 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8g444"] Dec 01 20:51:09 crc kubenswrapper[4852]: I1201 20:51:09.489235 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"db1dc4fa-69a3-4c29-b69b-f6080f275e97","Type":"ContainerStarted","Data":"8ae964a99abb92b126bff8968b6b4d735ca7f4487f3c688f0f16b4085dbc1d5e"} Dec 01 20:51:10 crc kubenswrapper[4852]: I1201 20:51:10.498298 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8g444" podUID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerName="registry-server" containerID="cri-o://84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1" gracePeriod=2 Dec 01 20:51:10 crc kubenswrapper[4852]: I1201 20:51:10.982508 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.000888 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=5.326207651 podStartE2EDuration="54.000868714s" podCreationTimestamp="2025-12-01 20:50:17 +0000 UTC" firstStartedPulling="2025-12-01 20:50:19.004147776 +0000 UTC m=+2738.931229183" lastFinishedPulling="2025-12-01 20:51:07.678808829 +0000 UTC m=+2787.605890246" observedRunningTime="2025-12-01 20:51:09.511999332 +0000 UTC m=+2789.439080749" watchObservedRunningTime="2025-12-01 20:51:11.000868714 +0000 UTC m=+2790.927950131" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.036954 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-utilities\") pod \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.037476 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-catalog-content\") pod \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.037551 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwjt4\" (UniqueName: \"kubernetes.io/projected/bfb75e7c-ce15-4500-9d50-ae12d611fe16-kube-api-access-kwjt4\") pod \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\" (UID: \"bfb75e7c-ce15-4500-9d50-ae12d611fe16\") " Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.038424 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-utilities" (OuterVolumeSpecName: "utilities") pod "bfb75e7c-ce15-4500-9d50-ae12d611fe16" (UID: "bfb75e7c-ce15-4500-9d50-ae12d611fe16"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.043322 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfb75e7c-ce15-4500-9d50-ae12d611fe16-kube-api-access-kwjt4" (OuterVolumeSpecName: "kube-api-access-kwjt4") pod "bfb75e7c-ce15-4500-9d50-ae12d611fe16" (UID: "bfb75e7c-ce15-4500-9d50-ae12d611fe16"). InnerVolumeSpecName "kube-api-access-kwjt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.087198 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bfb75e7c-ce15-4500-9d50-ae12d611fe16" (UID: "bfb75e7c-ce15-4500-9d50-ae12d611fe16"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.140798 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.141268 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfb75e7c-ce15-4500-9d50-ae12d611fe16-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.141481 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwjt4\" (UniqueName: \"kubernetes.io/projected/bfb75e7c-ce15-4500-9d50-ae12d611fe16-kube-api-access-kwjt4\") on node \"crc\" DevicePath \"\"" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.510903 4852 generic.go:334] "Generic (PLEG): container finished" podID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerID="84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1" exitCode=0 Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.510978 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8g444" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.511049 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8g444" event={"ID":"bfb75e7c-ce15-4500-9d50-ae12d611fe16","Type":"ContainerDied","Data":"84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1"} Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.511653 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8g444" event={"ID":"bfb75e7c-ce15-4500-9d50-ae12d611fe16","Type":"ContainerDied","Data":"630abc41e1c23308f54a3a87d298c8a201545a4f817f740b7a69fa218176028e"} Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.511724 4852 scope.go:117] "RemoveContainer" containerID="84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.536126 4852 scope.go:117] "RemoveContainer" containerID="8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.570417 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8g444"] Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.582827 4852 scope.go:117] "RemoveContainer" containerID="9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.593238 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8g444"] Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.609877 4852 scope.go:117] "RemoveContainer" containerID="84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1" Dec 01 20:51:11 crc kubenswrapper[4852]: E1201 20:51:11.610608 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1\": container with ID starting with 84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1 not found: ID does not exist" containerID="84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.610684 
4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1"} err="failed to get container status \"84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1\": rpc error: code = NotFound desc = could not find container \"84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1\": container with ID starting with 84e476a84d2f6548c2337d18d31992dfdcef4290453560035523f6f1067007e1 not found: ID does not exist" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.610728 4852 scope.go:117] "RemoveContainer" containerID="8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b" Dec 01 20:51:11 crc kubenswrapper[4852]: E1201 20:51:11.611278 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b\": container with ID starting with 8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b not found: ID does not exist" containerID="8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.611315 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b"} err="failed to get container status \"8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b\": rpc error: code = NotFound desc = could not find container \"8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b\": container with ID starting with 8eb95d7cbe603941b0bb74311495830f180567fccaab763e0adbfbe4afb0c29b not found: ID does not exist" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.611341 4852 scope.go:117] "RemoveContainer" containerID="9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0" Dec 01 20:51:11 crc kubenswrapper[4852]: E1201 20:51:11.611939 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0\": container with ID starting with 9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0 not found: ID does not exist" containerID="9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0" Dec 01 20:51:11 crc kubenswrapper[4852]: I1201 20:51:11.611967 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0"} err="failed to get container status \"9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0\": rpc error: code = NotFound desc = could not find container \"9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0\": container with ID starting with 9031ec1c7d7a8704ced660c570be209a6a18c9edaef022962fe7e056417303a0 not found: ID does not exist" Dec 01 20:51:12 crc kubenswrapper[4852]: I1201 20:51:12.337368 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" path="/var/lib/kubelet/pods/bfb75e7c-ce15-4500-9d50-ae12d611fe16/volumes" Dec 01 20:53:20 crc kubenswrapper[4852]: I1201 20:53:20.229970 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:53:20 crc kubenswrapper[4852]: I1201 20:53:20.230514 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:53:35 crc kubenswrapper[4852]: I1201 20:53:35.866519 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zj4j4"] Dec 01 20:53:35 crc kubenswrapper[4852]: E1201 20:53:35.867442 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerName="extract-content" Dec 01 20:53:35 crc kubenswrapper[4852]: I1201 20:53:35.867472 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerName="extract-content" Dec 01 20:53:35 crc kubenswrapper[4852]: E1201 20:53:35.867489 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerName="registry-server" Dec 01 20:53:35 crc kubenswrapper[4852]: I1201 20:53:35.867495 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerName="registry-server" Dec 01 20:53:35 crc kubenswrapper[4852]: E1201 20:53:35.867508 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerName="extract-utilities" Dec 01 20:53:35 crc kubenswrapper[4852]: I1201 20:53:35.867516 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerName="extract-utilities" Dec 01 20:53:35 crc kubenswrapper[4852]: I1201 20:53:35.867720 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb75e7c-ce15-4500-9d50-ae12d611fe16" containerName="registry-server" Dec 01 20:53:35 crc kubenswrapper[4852]: I1201 20:53:35.869235 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:35 crc kubenswrapper[4852]: I1201 20:53:35.900323 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zj4j4"] Dec 01 20:53:35 crc kubenswrapper[4852]: I1201 20:53:35.936403 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-catalog-content\") pod \"community-operators-zj4j4\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:35 crc kubenswrapper[4852]: I1201 20:53:35.936616 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-utilities\") pod \"community-operators-zj4j4\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:35 crc kubenswrapper[4852]: I1201 20:53:35.936952 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvpk5\" (UniqueName: \"kubernetes.io/projected/372942fb-f212-4085-8ac6-10282c18cef6-kube-api-access-jvpk5\") pod \"community-operators-zj4j4\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:36 crc kubenswrapper[4852]: I1201 20:53:36.038779 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvpk5\" (UniqueName: \"kubernetes.io/projected/372942fb-f212-4085-8ac6-10282c18cef6-kube-api-access-jvpk5\") pod \"community-operators-zj4j4\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:36 crc kubenswrapper[4852]: I1201 20:53:36.038891 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-catalog-content\") pod \"community-operators-zj4j4\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:36 crc kubenswrapper[4852]: I1201 20:53:36.038948 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-utilities\") pod \"community-operators-zj4j4\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:36 crc kubenswrapper[4852]: I1201 20:53:36.039381 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-utilities\") pod \"community-operators-zj4j4\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:36 crc kubenswrapper[4852]: I1201 20:53:36.039616 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-catalog-content\") pod \"community-operators-zj4j4\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:36 crc kubenswrapper[4852]: I1201 20:53:36.058623 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jvpk5\" (UniqueName: \"kubernetes.io/projected/372942fb-f212-4085-8ac6-10282c18cef6-kube-api-access-jvpk5\") pod \"community-operators-zj4j4\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:36 crc kubenswrapper[4852]: I1201 20:53:36.223339 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:36 crc kubenswrapper[4852]: I1201 20:53:36.766029 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zj4j4"] Dec 01 20:53:37 crc kubenswrapper[4852]: I1201 20:53:37.006223 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zj4j4" event={"ID":"372942fb-f212-4085-8ac6-10282c18cef6","Type":"ContainerStarted","Data":"d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10"} Dec 01 20:53:37 crc kubenswrapper[4852]: I1201 20:53:37.006279 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zj4j4" event={"ID":"372942fb-f212-4085-8ac6-10282c18cef6","Type":"ContainerStarted","Data":"4eac3de1750688e8253012eef881f2a353aae840fc1f4d0bedc4eb898d92057e"} Dec 01 20:53:38 crc kubenswrapper[4852]: I1201 20:53:38.017863 4852 generic.go:334] "Generic (PLEG): container finished" podID="372942fb-f212-4085-8ac6-10282c18cef6" containerID="d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10" exitCode=0 Dec 01 20:53:38 crc kubenswrapper[4852]: I1201 20:53:38.018142 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zj4j4" event={"ID":"372942fb-f212-4085-8ac6-10282c18cef6","Type":"ContainerDied","Data":"d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10"} Dec 01 20:53:39 crc kubenswrapper[4852]: I1201 20:53:39.027896 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zj4j4" event={"ID":"372942fb-f212-4085-8ac6-10282c18cef6","Type":"ContainerStarted","Data":"9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1"} Dec 01 20:53:40 crc kubenswrapper[4852]: I1201 20:53:40.044316 4852 generic.go:334] "Generic (PLEG): container finished" podID="372942fb-f212-4085-8ac6-10282c18cef6" containerID="9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1" exitCode=0 Dec 01 20:53:40 crc kubenswrapper[4852]: I1201 20:53:40.044597 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zj4j4" event={"ID":"372942fb-f212-4085-8ac6-10282c18cef6","Type":"ContainerDied","Data":"9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1"} Dec 01 20:53:41 crc kubenswrapper[4852]: I1201 20:53:41.054433 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zj4j4" event={"ID":"372942fb-f212-4085-8ac6-10282c18cef6","Type":"ContainerStarted","Data":"b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3"} Dec 01 20:53:41 crc kubenswrapper[4852]: I1201 20:53:41.075541 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zj4j4" podStartSLOduration=3.548612718 podStartE2EDuration="6.075519496s" podCreationTimestamp="2025-12-01 20:53:35 +0000 UTC" firstStartedPulling="2025-12-01 20:53:38.019928845 +0000 UTC m=+2937.947010262" lastFinishedPulling="2025-12-01 
20:53:40.546835633 +0000 UTC m=+2940.473917040" observedRunningTime="2025-12-01 20:53:41.070945494 +0000 UTC m=+2940.998026921" watchObservedRunningTime="2025-12-01 20:53:41.075519496 +0000 UTC m=+2941.002600913" Dec 01 20:53:46 crc kubenswrapper[4852]: I1201 20:53:46.225118 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:46 crc kubenswrapper[4852]: I1201 20:53:46.225726 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:46 crc kubenswrapper[4852]: I1201 20:53:46.298683 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:47 crc kubenswrapper[4852]: I1201 20:53:47.152046 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:47 crc kubenswrapper[4852]: I1201 20:53:47.203974 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zj4j4"] Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.119532 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zj4j4" podUID="372942fb-f212-4085-8ac6-10282c18cef6" containerName="registry-server" containerID="cri-o://b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3" gracePeriod=2 Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.651026 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.708364 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvpk5\" (UniqueName: \"kubernetes.io/projected/372942fb-f212-4085-8ac6-10282c18cef6-kube-api-access-jvpk5\") pod \"372942fb-f212-4085-8ac6-10282c18cef6\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.708408 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-utilities\") pod \"372942fb-f212-4085-8ac6-10282c18cef6\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.708585 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-catalog-content\") pod \"372942fb-f212-4085-8ac6-10282c18cef6\" (UID: \"372942fb-f212-4085-8ac6-10282c18cef6\") " Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.710403 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-utilities" (OuterVolumeSpecName: "utilities") pod "372942fb-f212-4085-8ac6-10282c18cef6" (UID: "372942fb-f212-4085-8ac6-10282c18cef6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.720801 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/372942fb-f212-4085-8ac6-10282c18cef6-kube-api-access-jvpk5" (OuterVolumeSpecName: "kube-api-access-jvpk5") pod "372942fb-f212-4085-8ac6-10282c18cef6" (UID: "372942fb-f212-4085-8ac6-10282c18cef6"). InnerVolumeSpecName "kube-api-access-jvpk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.763628 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "372942fb-f212-4085-8ac6-10282c18cef6" (UID: "372942fb-f212-4085-8ac6-10282c18cef6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.811103 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvpk5\" (UniqueName: \"kubernetes.io/projected/372942fb-f212-4085-8ac6-10282c18cef6-kube-api-access-jvpk5\") on node \"crc\" DevicePath \"\"" Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.811184 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:53:49 crc kubenswrapper[4852]: I1201 20:53:49.811195 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/372942fb-f212-4085-8ac6-10282c18cef6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.131968 4852 generic.go:334] "Generic (PLEG): container finished" podID="372942fb-f212-4085-8ac6-10282c18cef6" containerID="b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3" exitCode=0 Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.132041 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zj4j4" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.132043 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zj4j4" event={"ID":"372942fb-f212-4085-8ac6-10282c18cef6","Type":"ContainerDied","Data":"b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3"} Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.132180 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zj4j4" event={"ID":"372942fb-f212-4085-8ac6-10282c18cef6","Type":"ContainerDied","Data":"4eac3de1750688e8253012eef881f2a353aae840fc1f4d0bedc4eb898d92057e"} Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.132213 4852 scope.go:117] "RemoveContainer" containerID="b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.162101 4852 scope.go:117] "RemoveContainer" containerID="9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.187272 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zj4j4"] Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.200534 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zj4j4"] Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.202412 4852 scope.go:117] "RemoveContainer" containerID="d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.230310 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.230378 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.252731 4852 scope.go:117] "RemoveContainer" containerID="b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3" Dec 01 20:53:50 crc kubenswrapper[4852]: E1201 20:53:50.253475 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3\": container with ID starting with b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3 not found: ID does not exist" containerID="b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.253521 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3"} err="failed to get container status \"b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3\": rpc error: code = NotFound desc = could not find container \"b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3\": container with ID starting with 
b3745d682d53fe368bbb66efa4d55c69f610f0f9093d037bd4cac5c6d895a9a3 not found: ID does not exist" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.253550 4852 scope.go:117] "RemoveContainer" containerID="9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1" Dec 01 20:53:50 crc kubenswrapper[4852]: E1201 20:53:50.254057 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1\": container with ID starting with 9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1 not found: ID does not exist" containerID="9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.254094 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1"} err="failed to get container status \"9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1\": rpc error: code = NotFound desc = could not find container \"9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1\": container with ID starting with 9fb042d1ac46ccd05eb207976b3f3a74ad0c237077c693b6c723f1dd47e4e3c1 not found: ID does not exist" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.254109 4852 scope.go:117] "RemoveContainer" containerID="d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10" Dec 01 20:53:50 crc kubenswrapper[4852]: E1201 20:53:50.254405 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10\": container with ID starting with d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10 not found: ID does not exist" containerID="d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.254428 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10"} err="failed to get container status \"d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10\": rpc error: code = NotFound desc = could not find container \"d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10\": container with ID starting with d27ee97e268ce5266e66f464dc2efc79752b3491a752f92588821338a0fcad10 not found: ID does not exist" Dec 01 20:53:50 crc kubenswrapper[4852]: I1201 20:53:50.334623 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="372942fb-f212-4085-8ac6-10282c18cef6" path="/var/lib/kubelet/pods/372942fb-f212-4085-8ac6-10282c18cef6/volumes" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.704696 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kwkvw"] Dec 01 20:54:07 crc kubenswrapper[4852]: E1201 20:54:07.705660 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="372942fb-f212-4085-8ac6-10282c18cef6" containerName="extract-content" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.705675 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="372942fb-f212-4085-8ac6-10282c18cef6" containerName="extract-content" Dec 01 20:54:07 crc kubenswrapper[4852]: E1201 20:54:07.705689 4852 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="372942fb-f212-4085-8ac6-10282c18cef6" containerName="extract-utilities" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.705696 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="372942fb-f212-4085-8ac6-10282c18cef6" containerName="extract-utilities" Dec 01 20:54:07 crc kubenswrapper[4852]: E1201 20:54:07.705731 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="372942fb-f212-4085-8ac6-10282c18cef6" containerName="registry-server" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.705739 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="372942fb-f212-4085-8ac6-10282c18cef6" containerName="registry-server" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.705967 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="372942fb-f212-4085-8ac6-10282c18cef6" containerName="registry-server" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.707403 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.733519 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kwkvw"] Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.765021 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8766t\" (UniqueName: \"kubernetes.io/projected/d94f6eaf-d548-4f2f-bda0-97020a4d3002-kube-api-access-8766t\") pod \"redhat-operators-kwkvw\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") " pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.765404 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-catalog-content\") pod \"redhat-operators-kwkvw\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") " pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.765592 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-utilities\") pod \"redhat-operators-kwkvw\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") " pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.867597 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-catalog-content\") pod \"redhat-operators-kwkvw\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") " pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.867669 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-utilities\") pod \"redhat-operators-kwkvw\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") " pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.867777 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8766t\" (UniqueName: \"kubernetes.io/projected/d94f6eaf-d548-4f2f-bda0-97020a4d3002-kube-api-access-8766t\") pod \"redhat-operators-kwkvw\" (UID: 
\"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") " pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.868385 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-catalog-content\") pod \"redhat-operators-kwkvw\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") " pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.868556 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-utilities\") pod \"redhat-operators-kwkvw\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") " pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:07 crc kubenswrapper[4852]: I1201 20:54:07.891231 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8766t\" (UniqueName: \"kubernetes.io/projected/d94f6eaf-d548-4f2f-bda0-97020a4d3002-kube-api-access-8766t\") pod \"redhat-operators-kwkvw\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") " pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:08 crc kubenswrapper[4852]: I1201 20:54:08.041731 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:08 crc kubenswrapper[4852]: I1201 20:54:08.493956 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kwkvw"] Dec 01 20:54:09 crc kubenswrapper[4852]: I1201 20:54:09.316733 4852 generic.go:334] "Generic (PLEG): container finished" podID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerID="0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92" exitCode=0 Dec 01 20:54:09 crc kubenswrapper[4852]: I1201 20:54:09.316929 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwkvw" event={"ID":"d94f6eaf-d548-4f2f-bda0-97020a4d3002","Type":"ContainerDied","Data":"0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92"} Dec 01 20:54:09 crc kubenswrapper[4852]: I1201 20:54:09.317059 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwkvw" event={"ID":"d94f6eaf-d548-4f2f-bda0-97020a4d3002","Type":"ContainerStarted","Data":"11e4e96e53b7e9c4072caef5ec60645f8dbf4186e77f76f47cf4baefafe52e13"} Dec 01 20:54:10 crc kubenswrapper[4852]: I1201 20:54:10.345414 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwkvw" event={"ID":"d94f6eaf-d548-4f2f-bda0-97020a4d3002","Type":"ContainerStarted","Data":"93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1"} Dec 01 20:54:14 crc kubenswrapper[4852]: I1201 20:54:14.384378 4852 generic.go:334] "Generic (PLEG): container finished" podID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerID="93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1" exitCode=0 Dec 01 20:54:14 crc kubenswrapper[4852]: I1201 20:54:14.384428 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwkvw" event={"ID":"d94f6eaf-d548-4f2f-bda0-97020a4d3002","Type":"ContainerDied","Data":"93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1"} Dec 01 20:54:15 crc kubenswrapper[4852]: I1201 20:54:15.394293 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-kwkvw" event={"ID":"d94f6eaf-d548-4f2f-bda0-97020a4d3002","Type":"ContainerStarted","Data":"c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7"} Dec 01 20:54:15 crc kubenswrapper[4852]: I1201 20:54:15.416714 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kwkvw" podStartSLOduration=2.705497846 podStartE2EDuration="8.416694688s" podCreationTimestamp="2025-12-01 20:54:07 +0000 UTC" firstStartedPulling="2025-12-01 20:54:09.318722116 +0000 UTC m=+2969.245803533" lastFinishedPulling="2025-12-01 20:54:15.029918968 +0000 UTC m=+2974.957000375" observedRunningTime="2025-12-01 20:54:15.412957181 +0000 UTC m=+2975.340038598" watchObservedRunningTime="2025-12-01 20:54:15.416694688 +0000 UTC m=+2975.343776105" Dec 01 20:54:18 crc kubenswrapper[4852]: I1201 20:54:18.042685 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:18 crc kubenswrapper[4852]: I1201 20:54:18.044031 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:19 crc kubenswrapper[4852]: I1201 20:54:19.088187 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kwkvw" podUID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerName="registry-server" probeResult="failure" output=< Dec 01 20:54:19 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s Dec 01 20:54:19 crc kubenswrapper[4852]: > Dec 01 20:54:20 crc kubenswrapper[4852]: I1201 20:54:20.230192 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:54:20 crc kubenswrapper[4852]: I1201 20:54:20.230267 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:54:20 crc kubenswrapper[4852]: I1201 20:54:20.230336 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 20:54:20 crc kubenswrapper[4852]: I1201 20:54:20.231210 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:54:20 crc kubenswrapper[4852]: I1201 20:54:20.231284 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620" gracePeriod=600 Dec 01 20:54:20 crc kubenswrapper[4852]: E1201 20:54:20.372418 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:54:20 crc kubenswrapper[4852]: I1201 20:54:20.440887 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620" exitCode=0 Dec 01 20:54:20 crc kubenswrapper[4852]: I1201 20:54:20.440949 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"} Dec 01 20:54:20 crc kubenswrapper[4852]: I1201 20:54:20.440997 4852 scope.go:117] "RemoveContainer" containerID="615260a2c9cfb8d4223541759178b5521af20ae92f1fdea09ffdfd136377ab8b" Dec 01 20:54:20 crc kubenswrapper[4852]: I1201 20:54:20.441644 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620" Dec 01 20:54:20 crc kubenswrapper[4852]: E1201 20:54:20.441964 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 20:54:28 crc kubenswrapper[4852]: I1201 20:54:28.102273 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:28 crc kubenswrapper[4852]: I1201 20:54:28.148422 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kwkvw" Dec 01 20:54:28 crc kubenswrapper[4852]: I1201 20:54:28.342922 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kwkvw"] Dec 01 20:54:29 crc kubenswrapper[4852]: I1201 20:54:29.527177 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kwkvw" podUID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerName="registry-server" containerID="cri-o://c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7" gracePeriod=2 Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.073572 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kwkvw"
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.202233 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-utilities\") pod \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") "
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.202305 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8766t\" (UniqueName: \"kubernetes.io/projected/d94f6eaf-d548-4f2f-bda0-97020a4d3002-kube-api-access-8766t\") pod \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") "
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.202343 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-catalog-content\") pod \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\" (UID: \"d94f6eaf-d548-4f2f-bda0-97020a4d3002\") "
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.203327 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-utilities" (OuterVolumeSpecName: "utilities") pod "d94f6eaf-d548-4f2f-bda0-97020a4d3002" (UID: "d94f6eaf-d548-4f2f-bda0-97020a4d3002"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.208725 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d94f6eaf-d548-4f2f-bda0-97020a4d3002-kube-api-access-8766t" (OuterVolumeSpecName: "kube-api-access-8766t") pod "d94f6eaf-d548-4f2f-bda0-97020a4d3002" (UID: "d94f6eaf-d548-4f2f-bda0-97020a4d3002"). InnerVolumeSpecName "kube-api-access-8766t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.304748 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.304783 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8766t\" (UniqueName: \"kubernetes.io/projected/d94f6eaf-d548-4f2f-bda0-97020a4d3002-kube-api-access-8766t\") on node \"crc\" DevicePath \"\""
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.354642 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d94f6eaf-d548-4f2f-bda0-97020a4d3002" (UID: "d94f6eaf-d548-4f2f-bda0-97020a4d3002"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.407979 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d94f6eaf-d548-4f2f-bda0-97020a4d3002-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.540560 4852 generic.go:334] "Generic (PLEG): container finished" podID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerID="c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7" exitCode=0
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.540609 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwkvw" event={"ID":"d94f6eaf-d548-4f2f-bda0-97020a4d3002","Type":"ContainerDied","Data":"c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7"}
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.540642 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwkvw" event={"ID":"d94f6eaf-d548-4f2f-bda0-97020a4d3002","Type":"ContainerDied","Data":"11e4e96e53b7e9c4072caef5ec60645f8dbf4186e77f76f47cf4baefafe52e13"}
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.540663 4852 scope.go:117] "RemoveContainer" containerID="c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7"
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.540824 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kwkvw"
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.567581 4852 scope.go:117] "RemoveContainer" containerID="93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1"
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.585725 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kwkvw"]
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.596032 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kwkvw"]
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.599098 4852 scope.go:117] "RemoveContainer" containerID="0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92"
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.637927 4852 scope.go:117] "RemoveContainer" containerID="c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7"
Dec 01 20:54:30 crc kubenswrapper[4852]: E1201 20:54:30.638324 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7\": container with ID starting with c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7 not found: ID does not exist" containerID="c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7"
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.638372 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7"} err="failed to get container status \"c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7\": rpc error: code = NotFound desc = could not find container \"c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7\": container with ID starting with c52899a66514a866171ca9219bd0971b3897ae48bbe1ee53ffb835e44ed41fd7 not found: ID does not exist"
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.638401 4852 scope.go:117] "RemoveContainer" containerID="93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1"
Dec 01 20:54:30 crc kubenswrapper[4852]: E1201 20:54:30.638856 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1\": container with ID starting with 93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1 not found: ID does not exist" containerID="93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1"
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.638895 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1"} err="failed to get container status \"93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1\": rpc error: code = NotFound desc = could not find container \"93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1\": container with ID starting with 93631843a44f8291ad35b9fef30f167747dbf699fd835c12cc40492ff7e835a1 not found: ID does not exist"
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.638923 4852 scope.go:117] "RemoveContainer" containerID="0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92"
Dec 01 20:54:30 crc kubenswrapper[4852]: E1201 20:54:30.639231 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92\": container with ID starting with 0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92 not found: ID does not exist" containerID="0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92"
Dec 01 20:54:30 crc kubenswrapper[4852]: I1201 20:54:30.639262 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92"} err="failed to get container status \"0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92\": rpc error: code = NotFound desc = could not find container \"0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92\": container with ID starting with 0f03d008f050c0474e7eac79dc3b23278c796418f1d254c8ebd4b77fc6947d92 not found: ID does not exist"
Dec 01 20:54:32 crc kubenswrapper[4852]: I1201 20:54:32.331589 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" path="/var/lib/kubelet/pods/d94f6eaf-d548-4f2f-bda0-97020a4d3002/volumes"
Dec 01 20:54:34 crc kubenswrapper[4852]: I1201 20:54:34.319928 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:54:34 crc kubenswrapper[4852]: E1201 20:54:34.320632 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:54:48 crc kubenswrapper[4852]: I1201 20:54:48.322134 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:54:48 crc kubenswrapper[4852]: E1201 20:54:48.322905 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:55:00 crc kubenswrapper[4852]: I1201 20:55:00.325806 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:55:00 crc kubenswrapper[4852]: E1201 20:55:00.326564 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:55:15 crc kubenswrapper[4852]: I1201 20:55:15.319729 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:55:15 crc kubenswrapper[4852]: E1201 20:55:15.320421 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:55:26 crc kubenswrapper[4852]: I1201 20:55:26.320611 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:55:26 crc kubenswrapper[4852]: E1201 20:55:26.322442 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:55:40 crc kubenswrapper[4852]: I1201 20:55:40.327157 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:55:40 crc kubenswrapper[4852]: E1201 20:55:40.327922 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:55:52 crc kubenswrapper[4852]: I1201 20:55:52.320739 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:55:52 crc kubenswrapper[4852]: E1201 20:55:52.321703 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:56:07 crc kubenswrapper[4852]: I1201 20:56:07.320624 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:56:07 crc kubenswrapper[4852]: E1201 20:56:07.321317 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:56:20 crc kubenswrapper[4852]: I1201 20:56:20.330311 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:56:20 crc kubenswrapper[4852]: E1201 20:56:20.331132 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:56:31 crc kubenswrapper[4852]: I1201 20:56:31.319890 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:56:31 crc kubenswrapper[4852]: E1201 20:56:31.320727 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:56:45 crc kubenswrapper[4852]: I1201 20:56:45.321195 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:56:45 crc kubenswrapper[4852]: E1201 20:56:45.322181 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:56:59 crc kubenswrapper[4852]: I1201 20:56:59.320284 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:56:59 crc kubenswrapper[4852]: E1201 20:56:59.321063 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:57:12 crc kubenswrapper[4852]: I1201 20:57:12.319881 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:57:12 crc kubenswrapper[4852]: E1201 20:57:12.320639 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:57:27 crc kubenswrapper[4852]: I1201 20:57:27.320194 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:57:27 crc kubenswrapper[4852]: E1201 20:57:27.321421 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:57:38 crc kubenswrapper[4852]: I1201 20:57:38.320665 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:57:38 crc kubenswrapper[4852]: E1201 20:57:38.321435 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:57:49 crc kubenswrapper[4852]: I1201 20:57:49.320068 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:57:49 crc kubenswrapper[4852]: E1201 20:57:49.322070 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:58:01 crc kubenswrapper[4852]: I1201 20:58:01.319771 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:58:01 crc kubenswrapper[4852]: E1201 20:58:01.320734 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:58:12 crc kubenswrapper[4852]: I1201 20:58:12.320930 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:58:12 crc kubenswrapper[4852]: E1201 20:58:12.321836 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:58:26 crc kubenswrapper[4852]: I1201 20:58:26.320033 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:58:26 crc kubenswrapper[4852]: E1201 20:58:26.320821 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:58:41 crc kubenswrapper[4852]: I1201 20:58:41.319702 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:58:41 crc kubenswrapper[4852]: E1201 20:58:41.320360 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:58:53 crc kubenswrapper[4852]: I1201 20:58:53.319785 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:58:53 crc kubenswrapper[4852]: E1201 20:58:53.320692 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:59:04 crc kubenswrapper[4852]: I1201 20:59:04.320704 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:59:04 crc kubenswrapper[4852]: E1201 20:59:04.321983 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:59:18 crc kubenswrapper[4852]: I1201 20:59:18.320288 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:59:18 crc kubenswrapper[4852]: E1201 20:59:18.321152 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 20:59:30 crc kubenswrapper[4852]: I1201 20:59:30.335846 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620"
Dec 01 20:59:31 crc kubenswrapper[4852]: I1201 20:59:31.489228 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"1d6e6819cf926b9f130b851943ce2f5c9f5212d1ae5d8a80eff0b7503e1cfd4b"}
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.211496 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"]
Dec 01 21:00:00 crc kubenswrapper[4852]: E1201 21:00:00.212266 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerName="extract-content"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.212278 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerName="extract-content"
Dec 01 21:00:00 crc kubenswrapper[4852]: E1201 21:00:00.212294 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerName="registry-server"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.212300 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerName="registry-server"
Dec 01 21:00:00 crc kubenswrapper[4852]: E1201 21:00:00.212319 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerName="extract-utilities"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.212324 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerName="extract-utilities"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.212524 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="d94f6eaf-d548-4f2f-bda0-97020a4d3002" containerName="registry-server"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.213966 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.216752 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.216925 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.222364 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"]
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.301211 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cfc7da09-7151-433b-9e3d-0cd943930cc6-secret-volume\") pod \"collect-profiles-29410380-db8bj\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.301258 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cfc7da09-7151-433b-9e3d-0cd943930cc6-config-volume\") pod \"collect-profiles-29410380-db8bj\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.301280 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw7sl\" (UniqueName: \"kubernetes.io/projected/cfc7da09-7151-433b-9e3d-0cd943930cc6-kube-api-access-tw7sl\") pod \"collect-profiles-29410380-db8bj\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.404786 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cfc7da09-7151-433b-9e3d-0cd943930cc6-secret-volume\") pod \"collect-profiles-29410380-db8bj\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.404860 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cfc7da09-7151-433b-9e3d-0cd943930cc6-config-volume\") pod \"collect-profiles-29410380-db8bj\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.404901 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw7sl\" (UniqueName: \"kubernetes.io/projected/cfc7da09-7151-433b-9e3d-0cd943930cc6-kube-api-access-tw7sl\") pod \"collect-profiles-29410380-db8bj\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.406545 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cfc7da09-7151-433b-9e3d-0cd943930cc6-config-volume\") pod \"collect-profiles-29410380-db8bj\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.412059 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cfc7da09-7151-433b-9e3d-0cd943930cc6-secret-volume\") pod \"collect-profiles-29410380-db8bj\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.424350 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw7sl\" (UniqueName: \"kubernetes.io/projected/cfc7da09-7151-433b-9e3d-0cd943930cc6-kube-api-access-tw7sl\") pod \"collect-profiles-29410380-db8bj\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:00 crc kubenswrapper[4852]: I1201 21:00:00.536413 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:01 crc kubenswrapper[4852]: I1201 21:00:01.047377 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"]
Dec 01 21:00:01 crc kubenswrapper[4852]: I1201 21:00:01.794363 4852 generic.go:334] "Generic (PLEG): container finished" podID="cfc7da09-7151-433b-9e3d-0cd943930cc6" containerID="3c8027020e3d8d2471c4e61c12b2a30602702a07996222e2610a496d7e55d7b5" exitCode=0
Dec 01 21:00:01 crc kubenswrapper[4852]: I1201 21:00:01.794420 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj" event={"ID":"cfc7da09-7151-433b-9e3d-0cd943930cc6","Type":"ContainerDied","Data":"3c8027020e3d8d2471c4e61c12b2a30602702a07996222e2610a496d7e55d7b5"}
Dec 01 21:00:01 crc kubenswrapper[4852]: I1201 21:00:01.794835 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj" event={"ID":"cfc7da09-7151-433b-9e3d-0cd943930cc6","Type":"ContainerStarted","Data":"f34965ae68627017346d0d956300856b1317c7762106f53f8048d65239f93914"}
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.184976 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.375630 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw7sl\" (UniqueName: \"kubernetes.io/projected/cfc7da09-7151-433b-9e3d-0cd943930cc6-kube-api-access-tw7sl\") pod \"cfc7da09-7151-433b-9e3d-0cd943930cc6\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") "
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.375717 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cfc7da09-7151-433b-9e3d-0cd943930cc6-config-volume\") pod \"cfc7da09-7151-433b-9e3d-0cd943930cc6\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") "
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.375822 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cfc7da09-7151-433b-9e3d-0cd943930cc6-secret-volume\") pod \"cfc7da09-7151-433b-9e3d-0cd943930cc6\" (UID: \"cfc7da09-7151-433b-9e3d-0cd943930cc6\") "
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.376683 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfc7da09-7151-433b-9e3d-0cd943930cc6-config-volume" (OuterVolumeSpecName: "config-volume") pod "cfc7da09-7151-433b-9e3d-0cd943930cc6" (UID: "cfc7da09-7151-433b-9e3d-0cd943930cc6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.382223 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfc7da09-7151-433b-9e3d-0cd943930cc6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "cfc7da09-7151-433b-9e3d-0cd943930cc6" (UID: "cfc7da09-7151-433b-9e3d-0cd943930cc6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.385793 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfc7da09-7151-433b-9e3d-0cd943930cc6-kube-api-access-tw7sl" (OuterVolumeSpecName: "kube-api-access-tw7sl") pod "cfc7da09-7151-433b-9e3d-0cd943930cc6" (UID: "cfc7da09-7151-433b-9e3d-0cd943930cc6"). InnerVolumeSpecName "kube-api-access-tw7sl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.478409 4852 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cfc7da09-7151-433b-9e3d-0cd943930cc6-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.478446 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw7sl\" (UniqueName: \"kubernetes.io/projected/cfc7da09-7151-433b-9e3d-0cd943930cc6-kube-api-access-tw7sl\") on node \"crc\" DevicePath \"\""
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.478481 4852 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cfc7da09-7151-433b-9e3d-0cd943930cc6-config-volume\") on node \"crc\" DevicePath \"\""
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.812137 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj" event={"ID":"cfc7da09-7151-433b-9e3d-0cd943930cc6","Type":"ContainerDied","Data":"f34965ae68627017346d0d956300856b1317c7762106f53f8048d65239f93914"}
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.812186 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f34965ae68627017346d0d956300856b1317c7762106f53f8048d65239f93914"
Dec 01 21:00:03 crc kubenswrapper[4852]: I1201 21:00:03.812645 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410380-db8bj"
Dec 01 21:00:04 crc kubenswrapper[4852]: I1201 21:00:04.267912 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c"]
Dec 01 21:00:04 crc kubenswrapper[4852]: I1201 21:00:04.277834 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-2zb2c"]
Dec 01 21:00:04 crc kubenswrapper[4852]: I1201 21:00:04.331869 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6321b4f4-215e-4e5c-a341-6e970eb6dc9c" path="/var/lib/kubelet/pods/6321b4f4-215e-4e5c-a341-6e970eb6dc9c/volumes"
Dec 01 21:00:58 crc kubenswrapper[4852]: I1201 21:00:58.037509 4852 scope.go:117] "RemoveContainer" containerID="dbbe95ba79b288d91b9c04e5e13ebbe5c4ca212610eed4184f8a9466f0ca8dce"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.165712 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29410381-ttp6l"]
Dec 01 21:01:00 crc kubenswrapper[4852]: E1201 21:01:00.166381 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfc7da09-7151-433b-9e3d-0cd943930cc6" containerName="collect-profiles"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.166393 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfc7da09-7151-433b-9e3d-0cd943930cc6" containerName="collect-profiles"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.167075 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfc7da09-7151-433b-9e3d-0cd943930cc6" containerName="collect-profiles"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.168395 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.181114 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29410381-ttp6l"]
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.220880 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-config-data\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.220948 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w48p\" (UniqueName: \"kubernetes.io/projected/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-kube-api-access-4w48p\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.221189 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-combined-ca-bundle\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.221237 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-fernet-keys\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.322706 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-config-data\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.322752 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w48p\" (UniqueName: \"kubernetes.io/projected/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-kube-api-access-4w48p\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.322811 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-combined-ca-bundle\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.322838 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-fernet-keys\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.331550 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-fernet-keys\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.333955 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-combined-ca-bundle\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.337317 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-config-data\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.343298 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w48p\" (UniqueName: \"kubernetes.io/projected/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-kube-api-access-4w48p\") pod \"keystone-cron-29410381-ttp6l\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") " pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.485361 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:00 crc kubenswrapper[4852]: I1201 21:01:00.978424 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29410381-ttp6l"]
Dec 01 21:01:00 crc kubenswrapper[4852]: W1201 21:01:00.980794 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b1bf086_0cbc_468b_baa4_9c7f6bf1e642.slice/crio-48d4cdd84ccb0f1de5c2e248113ecce969aa0d6b4e13160f1067d10f0d096ffc WatchSource:0}: Error finding container 48d4cdd84ccb0f1de5c2e248113ecce969aa0d6b4e13160f1067d10f0d096ffc: Status 404 returned error can't find the container with id 48d4cdd84ccb0f1de5c2e248113ecce969aa0d6b4e13160f1067d10f0d096ffc
Dec 01 21:01:01 crc kubenswrapper[4852]: I1201 21:01:01.358970 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410381-ttp6l" event={"ID":"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642","Type":"ContainerStarted","Data":"5735d0f92ccb9dd81fa0cf4224f5cfea91b8cf596ca7603303f4ec63e5bcbfac"}
Dec 01 21:01:01 crc kubenswrapper[4852]: I1201 21:01:01.359325 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410381-ttp6l" event={"ID":"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642","Type":"ContainerStarted","Data":"48d4cdd84ccb0f1de5c2e248113ecce969aa0d6b4e13160f1067d10f0d096ffc"}
Dec 01 21:01:01 crc kubenswrapper[4852]: I1201 21:01:01.381789 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29410381-ttp6l" podStartSLOduration=1.381770393 podStartE2EDuration="1.381770393s" podCreationTimestamp="2025-12-01 21:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 21:01:01.376137419 +0000 UTC m=+3381.303218836" watchObservedRunningTime="2025-12-01 21:01:01.381770393 +0000 UTC m=+3381.308851820"
Dec 01 21:01:03 crc kubenswrapper[4852]: I1201 21:01:03.380095 4852 generic.go:334] "Generic (PLEG): container finished" podID="5b1bf086-0cbc-468b-baa4-9c7f6bf1e642" containerID="5735d0f92ccb9dd81fa0cf4224f5cfea91b8cf596ca7603303f4ec63e5bcbfac" exitCode=0
Dec 01 21:01:03 crc kubenswrapper[4852]: I1201 21:01:03.380181 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410381-ttp6l" event={"ID":"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642","Type":"ContainerDied","Data":"5735d0f92ccb9dd81fa0cf4224f5cfea91b8cf596ca7603303f4ec63e5bcbfac"}
Dec 01 21:01:04 crc kubenswrapper[4852]: I1201 21:01:04.776752 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:04 crc kubenswrapper[4852]: I1201 21:01:04.912651 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-combined-ca-bundle\") pod \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") "
Dec 01 21:01:04 crc kubenswrapper[4852]: I1201 21:01:04.912698 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-config-data\") pod \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") "
Dec 01 21:01:04 crc kubenswrapper[4852]: I1201 21:01:04.913097 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w48p\" (UniqueName: \"kubernetes.io/projected/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-kube-api-access-4w48p\") pod \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") "
Dec 01 21:01:04 crc kubenswrapper[4852]: I1201 21:01:04.913151 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-fernet-keys\") pod \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\" (UID: \"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642\") "
Dec 01 21:01:04 crc kubenswrapper[4852]: I1201 21:01:04.919086 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-kube-api-access-4w48p" (OuterVolumeSpecName: "kube-api-access-4w48p") pod "5b1bf086-0cbc-468b-baa4-9c7f6bf1e642" (UID: "5b1bf086-0cbc-468b-baa4-9c7f6bf1e642"). InnerVolumeSpecName "kube-api-access-4w48p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 21:01:04 crc kubenswrapper[4852]: I1201 21:01:04.920100 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "5b1bf086-0cbc-468b-baa4-9c7f6bf1e642" (UID: "5b1bf086-0cbc-468b-baa4-9c7f6bf1e642"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 21:01:04 crc kubenswrapper[4852]: I1201 21:01:04.946697 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b1bf086-0cbc-468b-baa4-9c7f6bf1e642" (UID: "5b1bf086-0cbc-468b-baa4-9c7f6bf1e642"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 21:01:04 crc kubenswrapper[4852]: I1201 21:01:04.968139 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-config-data" (OuterVolumeSpecName: "config-data") pod "5b1bf086-0cbc-468b-baa4-9c7f6bf1e642" (UID: "5b1bf086-0cbc-468b-baa4-9c7f6bf1e642"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 21:01:05 crc kubenswrapper[4852]: I1201 21:01:05.016236 4852 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 21:01:05 crc kubenswrapper[4852]: I1201 21:01:05.016272 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 21:01:05 crc kubenswrapper[4852]: I1201 21:01:05.016281 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w48p\" (UniqueName: \"kubernetes.io/projected/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-kube-api-access-4w48p\") on node \"crc\" DevicePath \"\""
Dec 01 21:01:05 crc kubenswrapper[4852]: I1201 21:01:05.016292 4852 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5b1bf086-0cbc-468b-baa4-9c7f6bf1e642-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 01 21:01:05 crc kubenswrapper[4852]: I1201 21:01:05.402594 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410381-ttp6l" event={"ID":"5b1bf086-0cbc-468b-baa4-9c7f6bf1e642","Type":"ContainerDied","Data":"48d4cdd84ccb0f1de5c2e248113ecce969aa0d6b4e13160f1067d10f0d096ffc"}
Dec 01 21:01:05 crc kubenswrapper[4852]: I1201 21:01:05.403502 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48d4cdd84ccb0f1de5c2e248113ecce969aa0d6b4e13160f1067d10f0d096ffc"
Dec 01 21:01:05 crc kubenswrapper[4852]: I1201 21:01:05.402658 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29410381-ttp6l"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.108300 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dmd8j"]
Dec 01 21:01:08 crc kubenswrapper[4852]: E1201 21:01:08.109243 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b1bf086-0cbc-468b-baa4-9c7f6bf1e642" containerName="keystone-cron"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.109257 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b1bf086-0cbc-468b-baa4-9c7f6bf1e642" containerName="keystone-cron"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.109490 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b1bf086-0cbc-468b-baa4-9c7f6bf1e642" containerName="keystone-cron"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.110821 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.122627 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dmd8j"]
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.218510 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl5k5\" (UniqueName: \"kubernetes.io/projected/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-kube-api-access-hl5k5\") pod \"certified-operators-dmd8j\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") " pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.218549 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-utilities\") pod \"certified-operators-dmd8j\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") " pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.218988 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-catalog-content\") pod \"certified-operators-dmd8j\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") " pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.321478 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-catalog-content\") pod \"certified-operators-dmd8j\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") " pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.321943 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl5k5\" (UniqueName: \"kubernetes.io/projected/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-kube-api-access-hl5k5\") pod \"certified-operators-dmd8j\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") " pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.321983 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-utilities\") pod \"certified-operators-dmd8j\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") " pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.321982 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-catalog-content\") pod \"certified-operators-dmd8j\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") " pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.322352 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-utilities\") pod \"certified-operators-dmd8j\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") " pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.350537 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl5k5\" (UniqueName: \"kubernetes.io/projected/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-kube-api-access-hl5k5\") pod \"certified-operators-dmd8j\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") " pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.437166 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:08 crc kubenswrapper[4852]: I1201 21:01:08.895830 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dmd8j"]
Dec 01 21:01:09 crc kubenswrapper[4852]: I1201 21:01:09.463008 4852 generic.go:334] "Generic (PLEG): container finished" podID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerID="ffbe801f14a5acb89f670549cf7a63815442f5ef1171e423860c0ffcfb7c552f" exitCode=0
Dec 01 21:01:09 crc kubenswrapper[4852]: I1201 21:01:09.463384 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dmd8j" event={"ID":"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e","Type":"ContainerDied","Data":"ffbe801f14a5acb89f670549cf7a63815442f5ef1171e423860c0ffcfb7c552f"}
Dec 01 21:01:09 crc kubenswrapper[4852]: I1201 21:01:09.463481 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dmd8j" event={"ID":"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e","Type":"ContainerStarted","Data":"1c7f954d145b1daa335a553b19294fe4fb72973cb0526032015a862dbd6ffbcc"}
Dec 01 21:01:09 crc kubenswrapper[4852]: I1201 21:01:09.466222 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 01 21:01:10 crc kubenswrapper[4852]: I1201 21:01:10.477147 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dmd8j" event={"ID":"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e","Type":"ContainerStarted","Data":"49167b097504acd4091c92cd1cb5477fcdc0dc033f9cb2729429742f39d94964"}
Dec 01 21:01:11 crc kubenswrapper[4852]: I1201 21:01:11.490344 4852 generic.go:334] "Generic (PLEG): container finished" podID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerID="49167b097504acd4091c92cd1cb5477fcdc0dc033f9cb2729429742f39d94964" exitCode=0
Dec 01 21:01:11 crc kubenswrapper[4852]: I1201 21:01:11.490462 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dmd8j" event={"ID":"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e","Type":"ContainerDied","Data":"49167b097504acd4091c92cd1cb5477fcdc0dc033f9cb2729429742f39d94964"}
Dec 01 21:01:12 crc kubenswrapper[4852]: I1201 21:01:12.501315 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dmd8j" event={"ID":"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e","Type":"ContainerStarted","Data":"40db1b5b37b974028c1d691227dc6f178c40be802d8b711da9846c412c2424cf"}
Dec 01 21:01:12 crc kubenswrapper[4852]: I1201 21:01:12.521357 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dmd8j" podStartSLOduration=1.831768572 podStartE2EDuration="4.52133517s" podCreationTimestamp="2025-12-01 21:01:08 +0000 UTC" firstStartedPulling="2025-12-01 21:01:09.465683775 +0000 UTC m=+3389.392765232" lastFinishedPulling="2025-12-01 21:01:12.155250413 +0000 UTC m=+3392.082331830" observedRunningTime="2025-12-01 21:01:12.51551858 +0000 UTC m=+3392.442599997" watchObservedRunningTime="2025-12-01 21:01:12.52133517 +0000 UTC m=+3392.448416587"
Dec 01 21:01:18 crc kubenswrapper[4852]: I1201 21:01:18.438320 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:18 crc kubenswrapper[4852]: I1201 21:01:18.438945 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:18 crc kubenswrapper[4852]: I1201 21:01:18.515102 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:18 crc kubenswrapper[4852]: I1201 21:01:18.621384 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:18 crc kubenswrapper[4852]: I1201 21:01:18.757905 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dmd8j"]
Dec 01 21:01:20 crc kubenswrapper[4852]: I1201 21:01:20.580431 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dmd8j" podUID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerName="registry-server" containerID="cri-o://40db1b5b37b974028c1d691227dc6f178c40be802d8b711da9846c412c2424cf" gracePeriod=2
Dec 01 21:01:21 crc kubenswrapper[4852]: I1201 21:01:21.591398 4852 generic.go:334] "Generic (PLEG): container finished" podID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerID="40db1b5b37b974028c1d691227dc6f178c40be802d8b711da9846c412c2424cf" exitCode=0
Dec 01 21:01:21 crc kubenswrapper[4852]: I1201 21:01:21.591489 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dmd8j" event={"ID":"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e","Type":"ContainerDied","Data":"40db1b5b37b974028c1d691227dc6f178c40be802d8b711da9846c412c2424cf"}
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.237170 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.417148 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-catalog-content\") pod \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") "
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.417256 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-utilities\") pod \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") "
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.417653 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hl5k5\" (UniqueName: \"kubernetes.io/projected/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-kube-api-access-hl5k5\") pod \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\" (UID: \"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e\") "
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.418106 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-utilities" (OuterVolumeSpecName: "utilities") pod "e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" (UID: "e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.418550 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.426427 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-kube-api-access-hl5k5" (OuterVolumeSpecName: "kube-api-access-hl5k5") pod "e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" (UID: "e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e"). InnerVolumeSpecName "kube-api-access-hl5k5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.509120 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" (UID: "e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.532721 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.532820 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hl5k5\" (UniqueName: \"kubernetes.io/projected/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e-kube-api-access-hl5k5\") on node \"crc\" DevicePath \"\""
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.612220 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dmd8j" event={"ID":"e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e","Type":"ContainerDied","Data":"1c7f954d145b1daa335a553b19294fe4fb72973cb0526032015a862dbd6ffbcc"}
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.612280 4852 scope.go:117] "RemoveContainer" containerID="40db1b5b37b974028c1d691227dc6f178c40be802d8b711da9846c412c2424cf"
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.612417 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dmd8j"
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.648428 4852 scope.go:117] "RemoveContainer" containerID="49167b097504acd4091c92cd1cb5477fcdc0dc033f9cb2729429742f39d94964"
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.670263 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dmd8j"]
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.684752 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dmd8j"]
Dec 01 21:01:22 crc kubenswrapper[4852]: I1201 21:01:22.709407 4852 scope.go:117] "RemoveContainer" containerID="ffbe801f14a5acb89f670549cf7a63815442f5ef1171e423860c0ffcfb7c552f"
Dec 01 21:01:24 crc kubenswrapper[4852]: I1201 21:01:24.339157 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" path="/var/lib/kubelet/pods/e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e/volumes"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.704184 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9qsb5"]
Dec 01 21:01:27 crc kubenswrapper[4852]: E1201 21:01:27.714045 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerName="registry-server"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.714082 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerName="registry-server"
Dec 01 21:01:27 crc kubenswrapper[4852]: E1201 21:01:27.714106 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerName="extract-utilities"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.714115 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerName="extract-utilities"
Dec 01 21:01:27 crc kubenswrapper[4852]: E1201 21:01:27.714157 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerName="extract-content"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.714165 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerName="extract-content"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.715074 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5ce022b-c758-460e-9cd4-2e3ca9b9aa9e" containerName="registry-server"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.721438 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qsb5"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.747984 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qsb5"]
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.863209 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-utilities\") pod \"redhat-marketplace-9qsb5\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " pod="openshift-marketplace/redhat-marketplace-9qsb5"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.863274 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2js6\" (UniqueName: \"kubernetes.io/projected/86838edd-b915-4ede-8774-2b0b2506b4c1-kube-api-access-g2js6\") pod \"redhat-marketplace-9qsb5\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " pod="openshift-marketplace/redhat-marketplace-9qsb5"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.864103 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-catalog-content\") pod \"redhat-marketplace-9qsb5\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " pod="openshift-marketplace/redhat-marketplace-9qsb5"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.965297 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-utilities\") pod \"redhat-marketplace-9qsb5\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " pod="openshift-marketplace/redhat-marketplace-9qsb5"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.965350 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2js6\" (UniqueName: \"kubernetes.io/projected/86838edd-b915-4ede-8774-2b0b2506b4c1-kube-api-access-g2js6\") pod \"redhat-marketplace-9qsb5\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " pod="openshift-marketplace/redhat-marketplace-9qsb5"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.965394 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-catalog-content\") pod \"redhat-marketplace-9qsb5\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " pod="openshift-marketplace/redhat-marketplace-9qsb5"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.965878 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-utilities\") pod \"redhat-marketplace-9qsb5\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " pod="openshift-marketplace/redhat-marketplace-9qsb5"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.965893 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-catalog-content\") pod \"redhat-marketplace-9qsb5\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " pod="openshift-marketplace/redhat-marketplace-9qsb5"
Dec 01 21:01:27 crc kubenswrapper[4852]: I1201 21:01:27.990814 4852 operation_generator.go:637] "MountVolume.SetUp
succeeded for volume \"kube-api-access-g2js6\" (UniqueName: \"kubernetes.io/projected/86838edd-b915-4ede-8774-2b0b2506b4c1-kube-api-access-g2js6\") pod \"redhat-marketplace-9qsb5\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " pod="openshift-marketplace/redhat-marketplace-9qsb5" Dec 01 21:01:28 crc kubenswrapper[4852]: I1201 21:01:28.062265 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qsb5" Dec 01 21:01:28 crc kubenswrapper[4852]: I1201 21:01:28.518787 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qsb5"] Dec 01 21:01:28 crc kubenswrapper[4852]: I1201 21:01:28.675470 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qsb5" event={"ID":"86838edd-b915-4ede-8774-2b0b2506b4c1","Type":"ContainerStarted","Data":"0014143732ba93c240fc0ceaf673ff2c9feec68ef64435a07b1344592a79accd"} Dec 01 21:01:29 crc kubenswrapper[4852]: I1201 21:01:29.688569 4852 generic.go:334] "Generic (PLEG): container finished" podID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerID="b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881" exitCode=0 Dec 01 21:01:29 crc kubenswrapper[4852]: I1201 21:01:29.688681 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qsb5" event={"ID":"86838edd-b915-4ede-8774-2b0b2506b4c1","Type":"ContainerDied","Data":"b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881"} Dec 01 21:01:31 crc kubenswrapper[4852]: I1201 21:01:31.714735 4852 generic.go:334] "Generic (PLEG): container finished" podID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerID="51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21" exitCode=0 Dec 01 21:01:31 crc kubenswrapper[4852]: I1201 21:01:31.714901 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qsb5" event={"ID":"86838edd-b915-4ede-8774-2b0b2506b4c1","Type":"ContainerDied","Data":"51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21"} Dec 01 21:01:32 crc kubenswrapper[4852]: I1201 21:01:32.729356 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qsb5" event={"ID":"86838edd-b915-4ede-8774-2b0b2506b4c1","Type":"ContainerStarted","Data":"b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e"} Dec 01 21:01:32 crc kubenswrapper[4852]: I1201 21:01:32.761414 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9qsb5" podStartSLOduration=3.268032196 podStartE2EDuration="5.761392218s" podCreationTimestamp="2025-12-01 21:01:27 +0000 UTC" firstStartedPulling="2025-12-01 21:01:29.69367444 +0000 UTC m=+3409.620755877" lastFinishedPulling="2025-12-01 21:01:32.187034482 +0000 UTC m=+3412.114115899" observedRunningTime="2025-12-01 21:01:32.747418936 +0000 UTC m=+3412.674500383" watchObservedRunningTime="2025-12-01 21:01:32.761392218 +0000 UTC m=+3412.688473645" Dec 01 21:01:38 crc kubenswrapper[4852]: I1201 21:01:38.062441 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9qsb5" Dec 01 21:01:38 crc kubenswrapper[4852]: I1201 21:01:38.063125 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9qsb5" Dec 01 21:01:38 crc kubenswrapper[4852]: I1201 21:01:38.121486 4852 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9qsb5" Dec 01 21:01:38 crc kubenswrapper[4852]: I1201 21:01:38.844568 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9qsb5" Dec 01 21:01:39 crc kubenswrapper[4852]: I1201 21:01:39.317889 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qsb5"] Dec 01 21:01:40 crc kubenswrapper[4852]: I1201 21:01:40.810235 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9qsb5" podUID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerName="registry-server" containerID="cri-o://b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e" gracePeriod=2 Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.393965 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qsb5" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.503223 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-catalog-content\") pod \"86838edd-b915-4ede-8774-2b0b2506b4c1\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.503362 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2js6\" (UniqueName: \"kubernetes.io/projected/86838edd-b915-4ede-8774-2b0b2506b4c1-kube-api-access-g2js6\") pod \"86838edd-b915-4ede-8774-2b0b2506b4c1\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.503524 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-utilities\") pod \"86838edd-b915-4ede-8774-2b0b2506b4c1\" (UID: \"86838edd-b915-4ede-8774-2b0b2506b4c1\") " Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.504843 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-utilities" (OuterVolumeSpecName: "utilities") pod "86838edd-b915-4ede-8774-2b0b2506b4c1" (UID: "86838edd-b915-4ede-8774-2b0b2506b4c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.510288 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86838edd-b915-4ede-8774-2b0b2506b4c1-kube-api-access-g2js6" (OuterVolumeSpecName: "kube-api-access-g2js6") pod "86838edd-b915-4ede-8774-2b0b2506b4c1" (UID: "86838edd-b915-4ede-8774-2b0b2506b4c1"). InnerVolumeSpecName "kube-api-access-g2js6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.536756 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86838edd-b915-4ede-8774-2b0b2506b4c1" (UID: "86838edd-b915-4ede-8774-2b0b2506b4c1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.605828 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2js6\" (UniqueName: \"kubernetes.io/projected/86838edd-b915-4ede-8774-2b0b2506b4c1-kube-api-access-g2js6\") on node \"crc\" DevicePath \"\"" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.605868 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.605878 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86838edd-b915-4ede-8774-2b0b2506b4c1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.826425 4852 generic.go:334] "Generic (PLEG): container finished" podID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerID="b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e" exitCode=0 Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.826519 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qsb5" event={"ID":"86838edd-b915-4ede-8774-2b0b2506b4c1","Type":"ContainerDied","Data":"b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e"} Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.826581 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qsb5" event={"ID":"86838edd-b915-4ede-8774-2b0b2506b4c1","Type":"ContainerDied","Data":"0014143732ba93c240fc0ceaf673ff2c9feec68ef64435a07b1344592a79accd"} Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.826594 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qsb5" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.826606 4852 scope.go:117] "RemoveContainer" containerID="b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.862025 4852 scope.go:117] "RemoveContainer" containerID="51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.877114 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qsb5"] Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.888745 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qsb5"] Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.912293 4852 scope.go:117] "RemoveContainer" containerID="b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.965409 4852 scope.go:117] "RemoveContainer" containerID="b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e" Dec 01 21:01:41 crc kubenswrapper[4852]: E1201 21:01:41.965955 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e\": container with ID starting with b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e not found: ID does not exist" containerID="b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.965997 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e"} err="failed to get container status \"b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e\": rpc error: code = NotFound desc = could not find container \"b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e\": container with ID starting with b865feed37b1ab15e9d106edd2af55101e712a8c287968f9dcbb9759ac04b67e not found: ID does not exist" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.966018 4852 scope.go:117] "RemoveContainer" containerID="51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21" Dec 01 21:01:41 crc kubenswrapper[4852]: E1201 21:01:41.966247 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21\": container with ID starting with 51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21 not found: ID does not exist" containerID="51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.966278 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21"} err="failed to get container status \"51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21\": rpc error: code = NotFound desc = could not find container \"51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21\": container with ID starting with 51a652aab9c6b8532a19c5875cd762322678dfc36a079bd2761ffb50036e0e21 not found: ID does not exist" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.966297 4852 scope.go:117] "RemoveContainer" 
containerID="b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881" Dec 01 21:01:41 crc kubenswrapper[4852]: E1201 21:01:41.966570 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881\": container with ID starting with b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881 not found: ID does not exist" containerID="b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881" Dec 01 21:01:41 crc kubenswrapper[4852]: I1201 21:01:41.966600 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881"} err="failed to get container status \"b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881\": rpc error: code = NotFound desc = could not find container \"b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881\": container with ID starting with b239a84fad80e192c93a0ace52b25cc638e75dc0f1d776ded040ca437831c881 not found: ID does not exist" Dec 01 21:01:42 crc kubenswrapper[4852]: I1201 21:01:42.331981 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86838edd-b915-4ede-8774-2b0b2506b4c1" path="/var/lib/kubelet/pods/86838edd-b915-4ede-8774-2b0b2506b4c1/volumes" Dec 01 21:01:50 crc kubenswrapper[4852]: I1201 21:01:50.229496 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:01:50 crc kubenswrapper[4852]: I1201 21:01:50.229940 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:02:20 crc kubenswrapper[4852]: I1201 21:02:20.229959 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:02:20 crc kubenswrapper[4852]: I1201 21:02:20.230493 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:02:50 crc kubenswrapper[4852]: I1201 21:02:50.230063 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:02:50 crc kubenswrapper[4852]: I1201 21:02:50.230668 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:02:50 crc kubenswrapper[4852]: I1201 21:02:50.230728 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 21:02:50 crc kubenswrapper[4852]: I1201 21:02:50.231727 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1d6e6819cf926b9f130b851943ce2f5c9f5212d1ae5d8a80eff0b7503e1cfd4b"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 21:02:50 crc kubenswrapper[4852]: I1201 21:02:50.231816 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://1d6e6819cf926b9f130b851943ce2f5c9f5212d1ae5d8a80eff0b7503e1cfd4b" gracePeriod=600 Dec 01 21:02:51 crc kubenswrapper[4852]: I1201 21:02:51.052036 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="1d6e6819cf926b9f130b851943ce2f5c9f5212d1ae5d8a80eff0b7503e1cfd4b" exitCode=0 Dec 01 21:02:51 crc kubenswrapper[4852]: I1201 21:02:51.052099 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"1d6e6819cf926b9f130b851943ce2f5c9f5212d1ae5d8a80eff0b7503e1cfd4b"} Dec 01 21:02:51 crc kubenswrapper[4852]: I1201 21:02:51.052951 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"} Dec 01 21:02:51 crc kubenswrapper[4852]: I1201 21:02:51.052992 4852 scope.go:117] "RemoveContainer" containerID="20a21dc20138fa64b23a17adb01f6a19be48e6e8ca3700f157c2b654ddec0620" Dec 01 21:02:58 crc kubenswrapper[4852]: I1201 21:02:58.125565 4852 generic.go:334] "Generic (PLEG): container finished" podID="db1dc4fa-69a3-4c29-b69b-f6080f275e97" containerID="8ae964a99abb92b126bff8968b6b4d735ca7f4487f3c688f0f16b4085dbc1d5e" exitCode=0 Dec 01 21:02:58 crc kubenswrapper[4852]: I1201 21:02:58.125665 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"db1dc4fa-69a3-4c29-b69b-f6080f275e97","Type":"ContainerDied","Data":"8ae964a99abb92b126bff8968b6b4d735ca7f4487f3c688f0f16b4085dbc1d5e"} Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.523738 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.655228 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8t76\" (UniqueName: \"kubernetes.io/projected/db1dc4fa-69a3-4c29-b69b-f6080f275e97-kube-api-access-f8t76\") pod \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.655699 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.655758 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-config-data\") pod \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.655899 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-workdir\") pod \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.655947 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ssh-key\") pod \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.656003 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config\") pod \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.656072 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ca-certs\") pod \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.656133 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-temporary\") pod \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.656526 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config-secret\") pod \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\" (UID: \"db1dc4fa-69a3-4c29-b69b-f6080f275e97\") " Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.656596 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-config-data" (OuterVolumeSpecName: "config-data") pod 
"db1dc4fa-69a3-4c29-b69b-f6080f275e97" (UID: "db1dc4fa-69a3-4c29-b69b-f6080f275e97"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.656719 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "db1dc4fa-69a3-4c29-b69b-f6080f275e97" (UID: "db1dc4fa-69a3-4c29-b69b-f6080f275e97"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.657031 4852 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.657056 4852 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.662000 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "db1dc4fa-69a3-4c29-b69b-f6080f275e97" (UID: "db1dc4fa-69a3-4c29-b69b-f6080f275e97"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.662550 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "test-operator-logs") pod "db1dc4fa-69a3-4c29-b69b-f6080f275e97" (UID: "db1dc4fa-69a3-4c29-b69b-f6080f275e97"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.662774 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db1dc4fa-69a3-4c29-b69b-f6080f275e97-kube-api-access-f8t76" (OuterVolumeSpecName: "kube-api-access-f8t76") pod "db1dc4fa-69a3-4c29-b69b-f6080f275e97" (UID: "db1dc4fa-69a3-4c29-b69b-f6080f275e97"). InnerVolumeSpecName "kube-api-access-f8t76". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.692462 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "db1dc4fa-69a3-4c29-b69b-f6080f275e97" (UID: "db1dc4fa-69a3-4c29-b69b-f6080f275e97"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.692869 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "db1dc4fa-69a3-4c29-b69b-f6080f275e97" (UID: "db1dc4fa-69a3-4c29-b69b-f6080f275e97"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.700684 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "db1dc4fa-69a3-4c29-b69b-f6080f275e97" (UID: "db1dc4fa-69a3-4c29-b69b-f6080f275e97"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.732160 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "db1dc4fa-69a3-4c29-b69b-f6080f275e97" (UID: "db1dc4fa-69a3-4c29-b69b-f6080f275e97"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.758981 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8t76\" (UniqueName: \"kubernetes.io/projected/db1dc4fa-69a3-4c29-b69b-f6080f275e97-kube-api-access-f8t76\") on node \"crc\" DevicePath \"\"" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.759057 4852 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.759073 4852 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/db1dc4fa-69a3-4c29-b69b-f6080f275e97-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.759087 4852 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.759105 4852 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.759117 4852 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.759132 4852 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db1dc4fa-69a3-4c29-b69b-f6080f275e97-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.793390 4852 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 01 21:02:59 crc kubenswrapper[4852]: I1201 21:02:59.861105 4852 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 01 21:03:00 crc kubenswrapper[4852]: I1201 21:03:00.148367 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" 
event={"ID":"db1dc4fa-69a3-4c29-b69b-f6080f275e97","Type":"ContainerDied","Data":"7bbdfcbaa1a10bcb64c78f45b7e529db74bea84e25a43efd3dec6086a8297447"} Dec 01 21:03:00 crc kubenswrapper[4852]: I1201 21:03:00.148420 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bbdfcbaa1a10bcb64c78f45b7e529db74bea84e25a43efd3dec6086a8297447" Dec 01 21:03:00 crc kubenswrapper[4852]: I1201 21:03:00.148615 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 21:03:00 crc kubenswrapper[4852]: E1201 21:03:00.301123 4852 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb1dc4fa_69a3_4c29_b69b_f6080f275e97.slice/crio-7bbdfcbaa1a10bcb64c78f45b7e529db74bea84e25a43efd3dec6086a8297447\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb1dc4fa_69a3_4c29_b69b_f6080f275e97.slice\": RecentStats: unable to find data in memory cache]" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.731906 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 21:03:06 crc kubenswrapper[4852]: E1201 21:03:06.733011 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerName="extract-utilities" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.733026 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerName="extract-utilities" Dec 01 21:03:06 crc kubenswrapper[4852]: E1201 21:03:06.733053 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db1dc4fa-69a3-4c29-b69b-f6080f275e97" containerName="tempest-tests-tempest-tests-runner" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.733059 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="db1dc4fa-69a3-4c29-b69b-f6080f275e97" containerName="tempest-tests-tempest-tests-runner" Dec 01 21:03:06 crc kubenswrapper[4852]: E1201 21:03:06.733070 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerName="extract-content" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.733076 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerName="extract-content" Dec 01 21:03:06 crc kubenswrapper[4852]: E1201 21:03:06.733103 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerName="registry-server" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.733111 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerName="registry-server" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.733278 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="db1dc4fa-69a3-4c29-b69b-f6080f275e97" containerName="tempest-tests-tempest-tests-runner" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.733301 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="86838edd-b915-4ede-8774-2b0b2506b4c1" containerName="registry-server" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.734020 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.739262 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-wxs6t" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.753310 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.809969 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kxdk\" (UniqueName: \"kubernetes.io/projected/b347c57e-1cf3-41f2-9b2a-3ca688e412f4-kube-api-access-8kxdk\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b347c57e-1cf3-41f2-9b2a-3ca688e412f4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.810222 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b347c57e-1cf3-41f2-9b2a-3ca688e412f4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.912870 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b347c57e-1cf3-41f2-9b2a-3ca688e412f4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.913491 4852 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b347c57e-1cf3-41f2-9b2a-3ca688e412f4\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.913812 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kxdk\" (UniqueName: \"kubernetes.io/projected/b347c57e-1cf3-41f2-9b2a-3ca688e412f4-kube-api-access-8kxdk\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b347c57e-1cf3-41f2-9b2a-3ca688e412f4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.949004 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kxdk\" (UniqueName: \"kubernetes.io/projected/b347c57e-1cf3-41f2-9b2a-3ca688e412f4-kube-api-access-8kxdk\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b347c57e-1cf3-41f2-9b2a-3ca688e412f4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 21:03:06 crc kubenswrapper[4852]: I1201 21:03:06.964396 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b347c57e-1cf3-41f2-9b2a-3ca688e412f4\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 21:03:07 crc 
kubenswrapper[4852]: I1201 21:03:07.064644 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 21:03:07 crc kubenswrapper[4852]: I1201 21:03:07.534158 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 21:03:08 crc kubenswrapper[4852]: I1201 21:03:08.244378 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"b347c57e-1cf3-41f2-9b2a-3ca688e412f4","Type":"ContainerStarted","Data":"cb66c560b861a686bfe0f77544ad6912b1d058cfe8f4115d57770f2e8adfd591"} Dec 01 21:03:09 crc kubenswrapper[4852]: I1201 21:03:09.256247 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"b347c57e-1cf3-41f2-9b2a-3ca688e412f4","Type":"ContainerStarted","Data":"e5bd83f2a62e9fdfafdaf1b84738036c7854a479767487f79987147b0c377077"} Dec 01 21:03:09 crc kubenswrapper[4852]: I1201 21:03:09.274370 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.013671872 podStartE2EDuration="3.274346155s" podCreationTimestamp="2025-12-01 21:03:06 +0000 UTC" firstStartedPulling="2025-12-01 21:03:07.538950516 +0000 UTC m=+3507.466031943" lastFinishedPulling="2025-12-01 21:03:08.799624789 +0000 UTC m=+3508.726706226" observedRunningTime="2025-12-01 21:03:09.272537389 +0000 UTC m=+3509.199618826" watchObservedRunningTime="2025-12-01 21:03:09.274346155 +0000 UTC m=+3509.201427612" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.391870 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-29bpp/must-gather-htmvl"] Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.394398 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-29bpp/must-gather-htmvl" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.397394 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-29bpp"/"openshift-service-ca.crt" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.397951 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-29bpp"/"kube-root-ca.crt" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.398558 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-29bpp"/"default-dockercfg-h78p6" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.400038 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-29bpp/must-gather-htmvl"] Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.539925 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/78e6d223-0022-426b-9c2c-3f3de97c8c1e-must-gather-output\") pod \"must-gather-htmvl\" (UID: \"78e6d223-0022-426b-9c2c-3f3de97c8c1e\") " pod="openshift-must-gather-29bpp/must-gather-htmvl" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.539990 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2x29\" (UniqueName: \"kubernetes.io/projected/78e6d223-0022-426b-9c2c-3f3de97c8c1e-kube-api-access-h2x29\") pod \"must-gather-htmvl\" (UID: \"78e6d223-0022-426b-9c2c-3f3de97c8c1e\") " pod="openshift-must-gather-29bpp/must-gather-htmvl" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.642380 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/78e6d223-0022-426b-9c2c-3f3de97c8c1e-must-gather-output\") pod \"must-gather-htmvl\" (UID: \"78e6d223-0022-426b-9c2c-3f3de97c8c1e\") " pod="openshift-must-gather-29bpp/must-gather-htmvl" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.642524 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2x29\" (UniqueName: \"kubernetes.io/projected/78e6d223-0022-426b-9c2c-3f3de97c8c1e-kube-api-access-h2x29\") pod \"must-gather-htmvl\" (UID: \"78e6d223-0022-426b-9c2c-3f3de97c8c1e\") " pod="openshift-must-gather-29bpp/must-gather-htmvl" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.643215 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/78e6d223-0022-426b-9c2c-3f3de97c8c1e-must-gather-output\") pod \"must-gather-htmvl\" (UID: \"78e6d223-0022-426b-9c2c-3f3de97c8c1e\") " pod="openshift-must-gather-29bpp/must-gather-htmvl" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.665597 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2x29\" (UniqueName: \"kubernetes.io/projected/78e6d223-0022-426b-9c2c-3f3de97c8c1e-kube-api-access-h2x29\") pod \"must-gather-htmvl\" (UID: \"78e6d223-0022-426b-9c2c-3f3de97c8c1e\") " pod="openshift-must-gather-29bpp/must-gather-htmvl" Dec 01 21:03:32 crc kubenswrapper[4852]: I1201 21:03:32.712081 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-29bpp/must-gather-htmvl" Dec 01 21:03:33 crc kubenswrapper[4852]: I1201 21:03:33.039763 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-29bpp/must-gather-htmvl"] Dec 01 21:03:33 crc kubenswrapper[4852]: I1201 21:03:33.496759 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/must-gather-htmvl" event={"ID":"78e6d223-0022-426b-9c2c-3f3de97c8c1e","Type":"ContainerStarted","Data":"7a783f931880b47ff5eef9bbe8107ae6e27095241e9f146278916d1e7c976e79"} Dec 01 21:03:41 crc kubenswrapper[4852]: I1201 21:03:41.585528 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/must-gather-htmvl" event={"ID":"78e6d223-0022-426b-9c2c-3f3de97c8c1e","Type":"ContainerStarted","Data":"9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b"} Dec 01 21:03:42 crc kubenswrapper[4852]: I1201 21:03:42.615742 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/must-gather-htmvl" event={"ID":"78e6d223-0022-426b-9c2c-3f3de97c8c1e","Type":"ContainerStarted","Data":"0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c"} Dec 01 21:03:42 crc kubenswrapper[4852]: I1201 21:03:42.649431 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-29bpp/must-gather-htmvl" podStartSLOduration=2.7025851850000002 podStartE2EDuration="10.649411099s" podCreationTimestamp="2025-12-01 21:03:32 +0000 UTC" firstStartedPulling="2025-12-01 21:03:33.050912704 +0000 UTC m=+3532.977994121" lastFinishedPulling="2025-12-01 21:03:40.997738568 +0000 UTC m=+3540.924820035" observedRunningTime="2025-12-01 21:03:42.639364179 +0000 UTC m=+3542.566445686" watchObservedRunningTime="2025-12-01 21:03:42.649411099 +0000 UTC m=+3542.576492526" Dec 01 21:03:45 crc kubenswrapper[4852]: I1201 21:03:45.565474 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-29bpp/crc-debug-4lfkr"] Dec 01 21:03:45 crc kubenswrapper[4852]: I1201 21:03:45.566868 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-4lfkr" Dec 01 21:03:45 crc kubenswrapper[4852]: I1201 21:03:45.605509 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/be15da8c-84f8-4d76-810f-121af396689f-host\") pod \"crc-debug-4lfkr\" (UID: \"be15da8c-84f8-4d76-810f-121af396689f\") " pod="openshift-must-gather-29bpp/crc-debug-4lfkr" Dec 01 21:03:45 crc kubenswrapper[4852]: I1201 21:03:45.605659 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkpkd\" (UniqueName: \"kubernetes.io/projected/be15da8c-84f8-4d76-810f-121af396689f-kube-api-access-lkpkd\") pod \"crc-debug-4lfkr\" (UID: \"be15da8c-84f8-4d76-810f-121af396689f\") " pod="openshift-must-gather-29bpp/crc-debug-4lfkr" Dec 01 21:03:45 crc kubenswrapper[4852]: I1201 21:03:45.707372 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkpkd\" (UniqueName: \"kubernetes.io/projected/be15da8c-84f8-4d76-810f-121af396689f-kube-api-access-lkpkd\") pod \"crc-debug-4lfkr\" (UID: \"be15da8c-84f8-4d76-810f-121af396689f\") " pod="openshift-must-gather-29bpp/crc-debug-4lfkr" Dec 01 21:03:45 crc kubenswrapper[4852]: I1201 21:03:45.707552 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/be15da8c-84f8-4d76-810f-121af396689f-host\") pod \"crc-debug-4lfkr\" (UID: \"be15da8c-84f8-4d76-810f-121af396689f\") " pod="openshift-must-gather-29bpp/crc-debug-4lfkr" Dec 01 21:03:45 crc kubenswrapper[4852]: I1201 21:03:45.707682 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/be15da8c-84f8-4d76-810f-121af396689f-host\") pod \"crc-debug-4lfkr\" (UID: \"be15da8c-84f8-4d76-810f-121af396689f\") " pod="openshift-must-gather-29bpp/crc-debug-4lfkr" Dec 01 21:03:45 crc kubenswrapper[4852]: I1201 21:03:45.726218 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkpkd\" (UniqueName: \"kubernetes.io/projected/be15da8c-84f8-4d76-810f-121af396689f-kube-api-access-lkpkd\") pod \"crc-debug-4lfkr\" (UID: \"be15da8c-84f8-4d76-810f-121af396689f\") " pod="openshift-must-gather-29bpp/crc-debug-4lfkr" Dec 01 21:03:45 crc kubenswrapper[4852]: I1201 21:03:45.885184 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-4lfkr" Dec 01 21:03:45 crc kubenswrapper[4852]: W1201 21:03:45.996268 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe15da8c_84f8_4d76_810f_121af396689f.slice/crio-c4468096e0fb3519300b6c66953421c2ef4f0946fdcd729ad446f8dd4f2b91d6 WatchSource:0}: Error finding container c4468096e0fb3519300b6c66953421c2ef4f0946fdcd729ad446f8dd4f2b91d6: Status 404 returned error can't find the container with id c4468096e0fb3519300b6c66953421c2ef4f0946fdcd729ad446f8dd4f2b91d6 Dec 01 21:03:46 crc kubenswrapper[4852]: I1201 21:03:46.657801 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/crc-debug-4lfkr" event={"ID":"be15da8c-84f8-4d76-810f-121af396689f","Type":"ContainerStarted","Data":"c4468096e0fb3519300b6c66953421c2ef4f0946fdcd729ad446f8dd4f2b91d6"} Dec 01 21:04:00 crc kubenswrapper[4852]: I1201 21:04:00.805009 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/crc-debug-4lfkr" event={"ID":"be15da8c-84f8-4d76-810f-121af396689f","Type":"ContainerStarted","Data":"6e5719231e69f4dd84bffcbf352f785c398ca500bf15bcd2888a0766d70b2903"} Dec 01 21:04:00 crc kubenswrapper[4852]: I1201 21:04:00.829152 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-29bpp/crc-debug-4lfkr" podStartSLOduration=1.650516002 podStartE2EDuration="15.829131672s" podCreationTimestamp="2025-12-01 21:03:45 +0000 UTC" firstStartedPulling="2025-12-01 21:03:46.003409047 +0000 UTC m=+3545.930490464" lastFinishedPulling="2025-12-01 21:04:00.182024717 +0000 UTC m=+3560.109106134" observedRunningTime="2025-12-01 21:04:00.822639591 +0000 UTC m=+3560.749721028" watchObservedRunningTime="2025-12-01 21:04:00.829131672 +0000 UTC m=+3560.756213099" Dec 01 21:04:41 crc kubenswrapper[4852]: I1201 21:04:41.198881 4852 generic.go:334] "Generic (PLEG): container finished" podID="be15da8c-84f8-4d76-810f-121af396689f" containerID="6e5719231e69f4dd84bffcbf352f785c398ca500bf15bcd2888a0766d70b2903" exitCode=0 Dec 01 21:04:41 crc kubenswrapper[4852]: I1201 21:04:41.199020 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/crc-debug-4lfkr" event={"ID":"be15da8c-84f8-4d76-810f-121af396689f","Type":"ContainerDied","Data":"6e5719231e69f4dd84bffcbf352f785c398ca500bf15bcd2888a0766d70b2903"} Dec 01 21:04:42 crc kubenswrapper[4852]: I1201 21:04:42.365598 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-4lfkr" Dec 01 21:04:42 crc kubenswrapper[4852]: I1201 21:04:42.405284 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-29bpp/crc-debug-4lfkr"] Dec 01 21:04:42 crc kubenswrapper[4852]: I1201 21:04:42.414536 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-29bpp/crc-debug-4lfkr"] Dec 01 21:04:42 crc kubenswrapper[4852]: I1201 21:04:42.557583 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/be15da8c-84f8-4d76-810f-121af396689f-host\") pod \"be15da8c-84f8-4d76-810f-121af396689f\" (UID: \"be15da8c-84f8-4d76-810f-121af396689f\") " Dec 01 21:04:42 crc kubenswrapper[4852]: I1201 21:04:42.557791 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkpkd\" (UniqueName: \"kubernetes.io/projected/be15da8c-84f8-4d76-810f-121af396689f-kube-api-access-lkpkd\") pod \"be15da8c-84f8-4d76-810f-121af396689f\" (UID: \"be15da8c-84f8-4d76-810f-121af396689f\") " Dec 01 21:04:42 crc kubenswrapper[4852]: I1201 21:04:42.557823 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be15da8c-84f8-4d76-810f-121af396689f-host" (OuterVolumeSpecName: "host") pod "be15da8c-84f8-4d76-810f-121af396689f" (UID: "be15da8c-84f8-4d76-810f-121af396689f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 21:04:42 crc kubenswrapper[4852]: I1201 21:04:42.558639 4852 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/be15da8c-84f8-4d76-810f-121af396689f-host\") on node \"crc\" DevicePath \"\"" Dec 01 21:04:42 crc kubenswrapper[4852]: I1201 21:04:42.563138 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be15da8c-84f8-4d76-810f-121af396689f-kube-api-access-lkpkd" (OuterVolumeSpecName: "kube-api-access-lkpkd") pod "be15da8c-84f8-4d76-810f-121af396689f" (UID: "be15da8c-84f8-4d76-810f-121af396689f"). InnerVolumeSpecName "kube-api-access-lkpkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:04:42 crc kubenswrapper[4852]: I1201 21:04:42.660737 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkpkd\" (UniqueName: \"kubernetes.io/projected/be15da8c-84f8-4d76-810f-121af396689f-kube-api-access-lkpkd\") on node \"crc\" DevicePath \"\"" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.229226 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4468096e0fb3519300b6c66953421c2ef4f0946fdcd729ad446f8dd4f2b91d6" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.229303 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-4lfkr" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.648421 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-29bpp/crc-debug-blthd"] Dec 01 21:04:43 crc kubenswrapper[4852]: E1201 21:04:43.649890 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be15da8c-84f8-4d76-810f-121af396689f" containerName="container-00" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.649976 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="be15da8c-84f8-4d76-810f-121af396689f" containerName="container-00" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.650226 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="be15da8c-84f8-4d76-810f-121af396689f" containerName="container-00" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.650924 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-blthd" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.783720 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvfhp\" (UniqueName: \"kubernetes.io/projected/1342c3f6-ffc1-4459-b508-1d986ada2895-kube-api-access-hvfhp\") pod \"crc-debug-blthd\" (UID: \"1342c3f6-ffc1-4459-b508-1d986ada2895\") " pod="openshift-must-gather-29bpp/crc-debug-blthd" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.784206 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1342c3f6-ffc1-4459-b508-1d986ada2895-host\") pod \"crc-debug-blthd\" (UID: \"1342c3f6-ffc1-4459-b508-1d986ada2895\") " pod="openshift-must-gather-29bpp/crc-debug-blthd" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.886599 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1342c3f6-ffc1-4459-b508-1d986ada2895-host\") pod \"crc-debug-blthd\" (UID: \"1342c3f6-ffc1-4459-b508-1d986ada2895\") " pod="openshift-must-gather-29bpp/crc-debug-blthd" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.886745 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1342c3f6-ffc1-4459-b508-1d986ada2895-host\") pod \"crc-debug-blthd\" (UID: \"1342c3f6-ffc1-4459-b508-1d986ada2895\") " pod="openshift-must-gather-29bpp/crc-debug-blthd" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.887107 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvfhp\" (UniqueName: \"kubernetes.io/projected/1342c3f6-ffc1-4459-b508-1d986ada2895-kube-api-access-hvfhp\") pod \"crc-debug-blthd\" (UID: \"1342c3f6-ffc1-4459-b508-1d986ada2895\") " pod="openshift-must-gather-29bpp/crc-debug-blthd" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.906337 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvfhp\" (UniqueName: \"kubernetes.io/projected/1342c3f6-ffc1-4459-b508-1d986ada2895-kube-api-access-hvfhp\") pod \"crc-debug-blthd\" (UID: \"1342c3f6-ffc1-4459-b508-1d986ada2895\") " pod="openshift-must-gather-29bpp/crc-debug-blthd" Dec 01 21:04:43 crc kubenswrapper[4852]: I1201 21:04:43.987203 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-blthd" Dec 01 21:04:44 crc kubenswrapper[4852]: W1201 21:04:44.030685 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1342c3f6_ffc1_4459_b508_1d986ada2895.slice/crio-8909b07dd25959ccd0ce5e44af7859c35e8876a4205d1a309335f4351bd169a4 WatchSource:0}: Error finding container 8909b07dd25959ccd0ce5e44af7859c35e8876a4205d1a309335f4351bd169a4: Status 404 returned error can't find the container with id 8909b07dd25959ccd0ce5e44af7859c35e8876a4205d1a309335f4351bd169a4 Dec 01 21:04:44 crc kubenswrapper[4852]: I1201 21:04:44.242008 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/crc-debug-blthd" event={"ID":"1342c3f6-ffc1-4459-b508-1d986ada2895","Type":"ContainerStarted","Data":"8909b07dd25959ccd0ce5e44af7859c35e8876a4205d1a309335f4351bd169a4"} Dec 01 21:04:44 crc kubenswrapper[4852]: I1201 21:04:44.347047 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be15da8c-84f8-4d76-810f-121af396689f" path="/var/lib/kubelet/pods/be15da8c-84f8-4d76-810f-121af396689f/volumes" Dec 01 21:04:45 crc kubenswrapper[4852]: I1201 21:04:45.258927 4852 generic.go:334] "Generic (PLEG): container finished" podID="1342c3f6-ffc1-4459-b508-1d986ada2895" containerID="c69e2820e1b981b188befe3a6cac7f45074bcffd3c1a005ee9985a42905d1757" exitCode=0 Dec 01 21:04:45 crc kubenswrapper[4852]: I1201 21:04:45.258981 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/crc-debug-blthd" event={"ID":"1342c3f6-ffc1-4459-b508-1d986ada2895","Type":"ContainerDied","Data":"c69e2820e1b981b188befe3a6cac7f45074bcffd3c1a005ee9985a42905d1757"} Dec 01 21:04:45 crc kubenswrapper[4852]: I1201 21:04:45.821148 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-29bpp/crc-debug-blthd"] Dec 01 21:04:45 crc kubenswrapper[4852]: I1201 21:04:45.828668 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-29bpp/crc-debug-blthd"] Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.418807 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-blthd" Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.537748 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1342c3f6-ffc1-4459-b508-1d986ada2895-host\") pod \"1342c3f6-ffc1-4459-b508-1d986ada2895\" (UID: \"1342c3f6-ffc1-4459-b508-1d986ada2895\") " Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.537907 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1342c3f6-ffc1-4459-b508-1d986ada2895-host" (OuterVolumeSpecName: "host") pod "1342c3f6-ffc1-4459-b508-1d986ada2895" (UID: "1342c3f6-ffc1-4459-b508-1d986ada2895"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.537926 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvfhp\" (UniqueName: \"kubernetes.io/projected/1342c3f6-ffc1-4459-b508-1d986ada2895-kube-api-access-hvfhp\") pod \"1342c3f6-ffc1-4459-b508-1d986ada2895\" (UID: \"1342c3f6-ffc1-4459-b508-1d986ada2895\") " Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.539126 4852 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1342c3f6-ffc1-4459-b508-1d986ada2895-host\") on node \"crc\" DevicePath \"\"" Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.545360 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1342c3f6-ffc1-4459-b508-1d986ada2895-kube-api-access-hvfhp" (OuterVolumeSpecName: "kube-api-access-hvfhp") pod "1342c3f6-ffc1-4459-b508-1d986ada2895" (UID: "1342c3f6-ffc1-4459-b508-1d986ada2895"). InnerVolumeSpecName "kube-api-access-hvfhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.640723 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvfhp\" (UniqueName: \"kubernetes.io/projected/1342c3f6-ffc1-4459-b508-1d986ada2895-kube-api-access-hvfhp\") on node \"crc\" DevicePath \"\"" Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.970892 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-29bpp/crc-debug-8dq9j"] Dec 01 21:04:46 crc kubenswrapper[4852]: E1201 21:04:46.971538 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1342c3f6-ffc1-4459-b508-1d986ada2895" containerName="container-00" Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.971556 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="1342c3f6-ffc1-4459-b508-1d986ada2895" containerName="container-00" Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.971801 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="1342c3f6-ffc1-4459-b508-1d986ada2895" containerName="container-00" Dec 01 21:04:46 crc kubenswrapper[4852]: I1201 21:04:46.972423 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-8dq9j" Dec 01 21:04:47 crc kubenswrapper[4852]: I1201 21:04:47.150921 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxsbq\" (UniqueName: \"kubernetes.io/projected/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-kube-api-access-gxsbq\") pod \"crc-debug-8dq9j\" (UID: \"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12\") " pod="openshift-must-gather-29bpp/crc-debug-8dq9j" Dec 01 21:04:47 crc kubenswrapper[4852]: I1201 21:04:47.151406 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-host\") pod \"crc-debug-8dq9j\" (UID: \"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12\") " pod="openshift-must-gather-29bpp/crc-debug-8dq9j" Dec 01 21:04:47 crc kubenswrapper[4852]: I1201 21:04:47.253855 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxsbq\" (UniqueName: \"kubernetes.io/projected/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-kube-api-access-gxsbq\") pod \"crc-debug-8dq9j\" (UID: \"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12\") " pod="openshift-must-gather-29bpp/crc-debug-8dq9j" Dec 01 21:04:47 crc kubenswrapper[4852]: I1201 21:04:47.253986 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-host\") pod \"crc-debug-8dq9j\" (UID: \"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12\") " pod="openshift-must-gather-29bpp/crc-debug-8dq9j" Dec 01 21:04:47 crc kubenswrapper[4852]: I1201 21:04:47.254114 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-host\") pod \"crc-debug-8dq9j\" (UID: \"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12\") " pod="openshift-must-gather-29bpp/crc-debug-8dq9j" Dec 01 21:04:47 crc kubenswrapper[4852]: I1201 21:04:47.279575 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxsbq\" (UniqueName: \"kubernetes.io/projected/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-kube-api-access-gxsbq\") pod \"crc-debug-8dq9j\" (UID: \"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12\") " pod="openshift-must-gather-29bpp/crc-debug-8dq9j" Dec 01 21:04:47 crc kubenswrapper[4852]: I1201 21:04:47.285537 4852 scope.go:117] "RemoveContainer" containerID="c69e2820e1b981b188befe3a6cac7f45074bcffd3c1a005ee9985a42905d1757" Dec 01 21:04:47 crc kubenswrapper[4852]: I1201 21:04:47.285789 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-blthd" Dec 01 21:04:47 crc kubenswrapper[4852]: I1201 21:04:47.294374 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-8dq9j" Dec 01 21:04:48 crc kubenswrapper[4852]: I1201 21:04:48.301040 4852 generic.go:334] "Generic (PLEG): container finished" podID="05ec4fd7-7d06-43ca-a568-5bc0a2bbde12" containerID="b8045b896e184270757d0b2f8b1e8c7d4f5513044d0cde4b600fa9d23dca77ff" exitCode=0 Dec 01 21:04:48 crc kubenswrapper[4852]: I1201 21:04:48.301140 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/crc-debug-8dq9j" event={"ID":"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12","Type":"ContainerDied","Data":"b8045b896e184270757d0b2f8b1e8c7d4f5513044d0cde4b600fa9d23dca77ff"} Dec 01 21:04:48 crc kubenswrapper[4852]: I1201 21:04:48.301445 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/crc-debug-8dq9j" event={"ID":"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12","Type":"ContainerStarted","Data":"e3a72d919f8d328c3d2ce01f8b9b6106eaf1a016f05230d9a5cb635a59b98960"} Dec 01 21:04:48 crc kubenswrapper[4852]: I1201 21:04:48.338424 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1342c3f6-ffc1-4459-b508-1d986ada2895" path="/var/lib/kubelet/pods/1342c3f6-ffc1-4459-b508-1d986ada2895/volumes" Dec 01 21:04:48 crc kubenswrapper[4852]: I1201 21:04:48.358398 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-29bpp/crc-debug-8dq9j"] Dec 01 21:04:48 crc kubenswrapper[4852]: I1201 21:04:48.369534 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-29bpp/crc-debug-8dq9j"] Dec 01 21:04:49 crc kubenswrapper[4852]: I1201 21:04:49.408605 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-8dq9j" Dec 01 21:04:49 crc kubenswrapper[4852]: I1201 21:04:49.599333 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxsbq\" (UniqueName: \"kubernetes.io/projected/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-kube-api-access-gxsbq\") pod \"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12\" (UID: \"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12\") " Dec 01 21:04:49 crc kubenswrapper[4852]: I1201 21:04:49.599413 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-host\") pod \"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12\" (UID: \"05ec4fd7-7d06-43ca-a568-5bc0a2bbde12\") " Dec 01 21:04:49 crc kubenswrapper[4852]: I1201 21:04:49.599484 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-host" (OuterVolumeSpecName: "host") pod "05ec4fd7-7d06-43ca-a568-5bc0a2bbde12" (UID: "05ec4fd7-7d06-43ca-a568-5bc0a2bbde12"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 21:04:49 crc kubenswrapper[4852]: I1201 21:04:49.599888 4852 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-host\") on node \"crc\" DevicePath \"\"" Dec 01 21:04:49 crc kubenswrapper[4852]: I1201 21:04:49.611206 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-kube-api-access-gxsbq" (OuterVolumeSpecName: "kube-api-access-gxsbq") pod "05ec4fd7-7d06-43ca-a568-5bc0a2bbde12" (UID: "05ec4fd7-7d06-43ca-a568-5bc0a2bbde12"). InnerVolumeSpecName "kube-api-access-gxsbq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:04:49 crc kubenswrapper[4852]: I1201 21:04:49.701869 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxsbq\" (UniqueName: \"kubernetes.io/projected/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12-kube-api-access-gxsbq\") on node \"crc\" DevicePath \"\"" Dec 01 21:04:50 crc kubenswrapper[4852]: I1201 21:04:50.229607 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:04:50 crc kubenswrapper[4852]: I1201 21:04:50.229655 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:04:50 crc kubenswrapper[4852]: I1201 21:04:50.358587 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-29bpp/crc-debug-8dq9j" Dec 01 21:04:50 crc kubenswrapper[4852]: I1201 21:04:50.365062 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05ec4fd7-7d06-43ca-a568-5bc0a2bbde12" path="/var/lib/kubelet/pods/05ec4fd7-7d06-43ca-a568-5bc0a2bbde12/volumes" Dec 01 21:04:50 crc kubenswrapper[4852]: I1201 21:04:50.365820 4852 scope.go:117] "RemoveContainer" containerID="b8045b896e184270757d0b2f8b1e8c7d4f5513044d0cde4b600fa9d23dca77ff" Dec 01 21:05:03 crc kubenswrapper[4852]: I1201 21:05:03.764295 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6b6b9785cb-jncbj_66e282d8-f6fc-4c9b-84e2-398efd252579/barbican-api/0.log" Dec 01 21:05:03 crc kubenswrapper[4852]: I1201 21:05:03.874812 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6b6b9785cb-jncbj_66e282d8-f6fc-4c9b-84e2-398efd252579/barbican-api-log/0.log" Dec 01 21:05:03 crc kubenswrapper[4852]: I1201 21:05:03.969895 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6ffc9cc59d-mbzlh_d6030470-6a0e-43fc-ae0c-755a3d4a9980/barbican-keystone-listener/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.013777 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6ffc9cc59d-mbzlh_d6030470-6a0e-43fc-ae0c-755a3d4a9980/barbican-keystone-listener-log/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.150594 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5dcb96d78f-vvtc5_6ec762a2-a422-40fd-91a8-fdaf58be343c/barbican-worker/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.167353 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5dcb96d78f-vvtc5_6ec762a2-a422-40fd-91a8-fdaf58be343c/barbican-worker-log/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.312743 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm_03d266a8-6787-4bc8-8836-d11fb0d078b4/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.382769 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_113952dd-818a-4d1d-a610-789c5cec4238/ceilometer-central-agent/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.487240 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_113952dd-818a-4d1d-a610-789c5cec4238/ceilometer-notification-agent/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.495272 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_113952dd-818a-4d1d-a610-789c5cec4238/proxy-httpd/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.577090 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_113952dd-818a-4d1d-a610-789c5cec4238/sg-core/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.703354 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f20e47a0-b3f2-48e5-baae-1e75e24377ac/cinder-api/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.712435 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f20e47a0-b3f2-48e5-baae-1e75e24377ac/cinder-api-log/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.871140 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a/cinder-scheduler/0.log" Dec 01 21:05:04 crc kubenswrapper[4852]: I1201 21:05:04.946704 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a/probe/0.log" Dec 01 21:05:05 crc kubenswrapper[4852]: I1201 21:05:05.082551 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c_d7fb0098-9a59-4686-a483-8a1361628214/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:05 crc kubenswrapper[4852]: I1201 21:05:05.168393 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr_5839b9be-5c81-47e2-b392-bf8652b0403e/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:05 crc kubenswrapper[4852]: I1201 21:05:05.320309 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59dddb89-qnwhw_e04bb162-f05c-4844-b368-70764dce284d/init/0.log" Dec 01 21:05:05 crc kubenswrapper[4852]: I1201 21:05:05.462575 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59dddb89-qnwhw_e04bb162-f05c-4844-b368-70764dce284d/init/0.log" Dec 01 21:05:05 crc kubenswrapper[4852]: I1201 21:05:05.531584 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59dddb89-qnwhw_e04bb162-f05c-4844-b368-70764dce284d/dnsmasq-dns/0.log" Dec 01 21:05:05 crc kubenswrapper[4852]: I1201 21:05:05.543673 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-q7pls_f399c1ee-c0af-4085-953e-6333beb90786/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:05 crc kubenswrapper[4852]: I1201 21:05:05.726054 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_02fed1df-7a8d-41ed-8662-17ecda728c06/glance-httpd/0.log" Dec 01 21:05:05 crc kubenswrapper[4852]: I1201 21:05:05.783811 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_02fed1df-7a8d-41ed-8662-17ecda728c06/glance-log/0.log" Dec 01 
21:05:05 crc kubenswrapper[4852]: I1201 21:05:05.904309 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_c6cbf2dd-2b08-4fa7-9530-e5835103a6d3/glance-httpd/0.log" Dec 01 21:05:05 crc kubenswrapper[4852]: I1201 21:05:05.909409 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_c6cbf2dd-2b08-4fa7-9530-e5835103a6d3/glance-log/0.log" Dec 01 21:05:06 crc kubenswrapper[4852]: I1201 21:05:06.139948 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-dfd6f888-xxwbg_160a77b2-5ec6-4223-b939-8e90b339f530/horizon/0.log" Dec 01 21:05:06 crc kubenswrapper[4852]: I1201 21:05:06.229231 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-s57rw_a0df6c51-df24-475e-b857-39aafce2f093/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:06 crc kubenswrapper[4852]: I1201 21:05:06.349961 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-pn2q6_4055e3d3-767a-4a20-95e4-dda1685cbe61/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:06 crc kubenswrapper[4852]: I1201 21:05:06.407598 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-dfd6f888-xxwbg_160a77b2-5ec6-4223-b939-8e90b339f530/horizon-log/0.log" Dec 01 21:05:06 crc kubenswrapper[4852]: I1201 21:05:06.686712 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-574c7f8dfc-6k2xn_d5fbd8b0-59fb-402e-8442-0302ea125e49/keystone-api/0.log" Dec 01 21:05:06 crc kubenswrapper[4852]: I1201 21:05:06.707618 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29410381-ttp6l_5b1bf086-0cbc-468b-baa4-9c7f6bf1e642/keystone-cron/0.log" Dec 01 21:05:06 crc kubenswrapper[4852]: I1201 21:05:06.809399 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_2896f52e-0a75-4d18-b72b-66b173aaa3b2/kube-state-metrics/0.log" Dec 01 21:05:06 crc kubenswrapper[4852]: I1201 21:05:06.921847 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv_6e87649b-f17b-4067-9803-f1cd06147f82/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:07 crc kubenswrapper[4852]: I1201 21:05:07.262828 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-55686cd79f-5wjtq_fc88aba6-cf67-4609-9a04-797090fcce15/neutron-httpd/0.log" Dec 01 21:05:07 crc kubenswrapper[4852]: I1201 21:05:07.264305 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-55686cd79f-5wjtq_fc88aba6-cf67-4609-9a04-797090fcce15/neutron-api/0.log" Dec 01 21:05:07 crc kubenswrapper[4852]: I1201 21:05:07.487565 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr_4f398cd5-1aca-4178-9fcd-50a3bb15bfec/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:08 crc kubenswrapper[4852]: I1201 21:05:08.014559 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_623fc41b-9221-407d-a5d7-e59ce151725a/nova-api-log/0.log" Dec 01 21:05:08 crc kubenswrapper[4852]: I1201 21:05:08.023287 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_30f58a14-9ee3-44ea-9737-f14510a50b29/nova-cell0-conductor-conductor/0.log" Dec 01 
21:05:08 crc kubenswrapper[4852]: I1201 21:05:08.339156 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_623fc41b-9221-407d-a5d7-e59ce151725a/nova-api-api/0.log" Dec 01 21:05:08 crc kubenswrapper[4852]: I1201 21:05:08.383893 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04/nova-cell1-conductor-conductor/0.log" Dec 01 21:05:08 crc kubenswrapper[4852]: I1201 21:05:08.396660 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_fb428793-fd4f-4e29-a7e1-7c5b539d01d7/nova-cell1-novncproxy-novncproxy/0.log" Dec 01 21:05:08 crc kubenswrapper[4852]: I1201 21:05:08.620670 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-9669b_aaf7fe95-5448-404e-b2f4-7bac25b267db/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:08 crc kubenswrapper[4852]: I1201 21:05:08.721629 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b254248d-f4d2-454d-bc92-09e0d709a0b8/nova-metadata-log/0.log" Dec 01 21:05:08 crc kubenswrapper[4852]: I1201 21:05:08.966966 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_f04ccdb1-afe8-4f1c-b475-b10384993bdc/nova-scheduler-scheduler/0.log" Dec 01 21:05:08 crc kubenswrapper[4852]: I1201 21:05:08.979083 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_775ec07e-8dd8-47f7-94f1-4c5355335a82/mysql-bootstrap/0.log" Dec 01 21:05:09 crc kubenswrapper[4852]: I1201 21:05:09.205028 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_775ec07e-8dd8-47f7-94f1-4c5355335a82/mysql-bootstrap/0.log" Dec 01 21:05:09 crc kubenswrapper[4852]: I1201 21:05:09.281934 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_775ec07e-8dd8-47f7-94f1-4c5355335a82/galera/0.log" Dec 01 21:05:09 crc kubenswrapper[4852]: I1201 21:05:09.400399 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5a512edf-0808-47a9-91dd-81da3cf1cda9/mysql-bootstrap/0.log" Dec 01 21:05:09 crc kubenswrapper[4852]: I1201 21:05:09.612492 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5a512edf-0808-47a9-91dd-81da3cf1cda9/galera/0.log" Dec 01 21:05:09 crc kubenswrapper[4852]: I1201 21:05:09.622884 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5a512edf-0808-47a9-91dd-81da3cf1cda9/mysql-bootstrap/0.log" Dec 01 21:05:09 crc kubenswrapper[4852]: I1201 21:05:09.860337 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_49382464-d20a-4ec7-9096-5679b0fc12b7/openstackclient/0.log" Dec 01 21:05:09 crc kubenswrapper[4852]: I1201 21:05:09.951163 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-8kwmk_5ef29299-3043-4921-b77b-07416d89ed96/ovn-controller/0.log" Dec 01 21:05:09 crc kubenswrapper[4852]: I1201 21:05:09.951394 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b254248d-f4d2-454d-bc92-09e0d709a0b8/nova-metadata-metadata/0.log" Dec 01 21:05:10 crc kubenswrapper[4852]: I1201 21:05:10.140382 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-metrics-86jpt_b30528cc-b404-4564-bcac-da1fdc60ae52/openstack-network-exporter/0.log" Dec 01 21:05:10 crc kubenswrapper[4852]: I1201 21:05:10.237115 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rwcdk_d0b85f35-5e0a-45ed-a162-02c81ffbdedb/ovsdb-server-init/0.log" Dec 01 21:05:10 crc kubenswrapper[4852]: I1201 21:05:10.404244 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rwcdk_d0b85f35-5e0a-45ed-a162-02c81ffbdedb/ovsdb-server/0.log" Dec 01 21:05:10 crc kubenswrapper[4852]: I1201 21:05:10.426602 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rwcdk_d0b85f35-5e0a-45ed-a162-02c81ffbdedb/ovs-vswitchd/0.log" Dec 01 21:05:10 crc kubenswrapper[4852]: I1201 21:05:10.459417 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rwcdk_d0b85f35-5e0a-45ed-a162-02c81ffbdedb/ovsdb-server-init/0.log" Dec 01 21:05:10 crc kubenswrapper[4852]: I1201 21:05:10.661768 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-sphgd_9f4a4b48-5ead-42a3-9438-ec9103db3b39/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:10 crc kubenswrapper[4852]: I1201 21:05:10.681530 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7170ebb9-5806-4a03-8316-8c396a916197/ovn-northd/0.log" Dec 01 21:05:10 crc kubenswrapper[4852]: I1201 21:05:10.697512 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7170ebb9-5806-4a03-8316-8c396a916197/openstack-network-exporter/0.log" Dec 01 21:05:10 crc kubenswrapper[4852]: I1201 21:05:10.887216 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3e569dc0-0de8-47cf-a1d3-1e649efde4af/openstack-network-exporter/0.log" Dec 01 21:05:10 crc kubenswrapper[4852]: I1201 21:05:10.912873 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3e569dc0-0de8-47cf-a1d3-1e649efde4af/ovsdbserver-nb/0.log" Dec 01 21:05:11 crc kubenswrapper[4852]: I1201 21:05:11.089827 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0ff6ee0b-6797-494c-8166-88c5cc7cf3fe/openstack-network-exporter/0.log" Dec 01 21:05:11 crc kubenswrapper[4852]: I1201 21:05:11.103623 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0ff6ee0b-6797-494c-8166-88c5cc7cf3fe/ovsdbserver-sb/0.log" Dec 01 21:05:11 crc kubenswrapper[4852]: I1201 21:05:11.308565 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-fd7b4cb9d-8zvhn_e12728e7-2002-493c-ad13-3bbb68e8ecf7/placement-api/0.log" Dec 01 21:05:11 crc kubenswrapper[4852]: I1201 21:05:11.400782 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-fd7b4cb9d-8zvhn_e12728e7-2002-493c-ad13-3bbb68e8ecf7/placement-log/0.log" Dec 01 21:05:11 crc kubenswrapper[4852]: I1201 21:05:11.438255 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_5eb0a95a-7ba8-48aa-80bc-245c195063b0/setup-container/0.log" Dec 01 21:05:11 crc kubenswrapper[4852]: I1201 21:05:11.649297 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_5eb0a95a-7ba8-48aa-80bc-245c195063b0/setup-container/0.log" Dec 01 21:05:11 crc kubenswrapper[4852]: I1201 21:05:11.717918 4852 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_5eb0a95a-7ba8-48aa-80bc-245c195063b0/rabbitmq/0.log" Dec 01 21:05:11 crc kubenswrapper[4852]: I1201 21:05:11.749052 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9b1036e7-b15b-4b19-bac9-4ce322698550/setup-container/0.log" Dec 01 21:05:11 crc kubenswrapper[4852]: I1201 21:05:11.950364 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq_23761811-cb87-42a7-b8a4-1ababc02ac47/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:11 crc kubenswrapper[4852]: I1201 21:05:11.989158 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9b1036e7-b15b-4b19-bac9-4ce322698550/rabbitmq/0.log" Dec 01 21:05:12 crc kubenswrapper[4852]: I1201 21:05:12.000115 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9b1036e7-b15b-4b19-bac9-4ce322698550/setup-container/0.log" Dec 01 21:05:12 crc kubenswrapper[4852]: I1201 21:05:12.189571 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-26vmn_79e71e37-cc0d-42e9-89dd-9cb4722aa53a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:12 crc kubenswrapper[4852]: I1201 21:05:12.219647 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6_3edb936c-fcd0-4599-9c43-6ed0a4b957c4/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:12 crc kubenswrapper[4852]: I1201 21:05:12.451967 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-fwc7m_68042211-6c10-446e-bf41-ebfeff2a87ef/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:12 crc kubenswrapper[4852]: I1201 21:05:12.526532 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-w5hlg_78d28364-d4ba-45c7-be9a-d3a138e64800/ssh-known-hosts-edpm-deployment/0.log" Dec 01 21:05:12 crc kubenswrapper[4852]: I1201 21:05:12.738405 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-cd9d56787-qlkbk_aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c/proxy-server/0.log" Dec 01 21:05:12 crc kubenswrapper[4852]: I1201 21:05:12.799789 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-cd9d56787-qlkbk_aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c/proxy-httpd/0.log" Dec 01 21:05:12 crc kubenswrapper[4852]: I1201 21:05:12.889132 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-v8pzw_d31bdc12-ed48-45e2-b990-2b098be82119/swift-ring-rebalance/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.004239 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/account-auditor/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.043799 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/account-reaper/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.120433 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/account-replicator/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.215054 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/account-server/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.250902 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/container-auditor/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.299809 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/container-replicator/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.316544 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/container-server/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.409194 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/container-updater/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.489020 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/object-auditor/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.523947 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/object-expirer/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.537344 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/object-replicator/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.610936 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/object-server/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.717202 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/rsync/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.721099 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/object-updater/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.743274 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/swift-recon-cron/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.972146 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_db1dc4fa-69a3-4c29-b69b-f6080f275e97/tempest-tests-tempest-tests-runner/0.log" Dec 01 21:05:13 crc kubenswrapper[4852]: I1201 21:05:13.999552 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx_59dae619-1f63-4b50-84ed-037a15a55876/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:14 crc kubenswrapper[4852]: I1201 21:05:14.169393 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_b347c57e-1cf3-41f2-9b2a-3ca688e412f4/test-operator-logs-container/0.log" Dec 01 21:05:14 crc kubenswrapper[4852]: I1201 21:05:14.240357 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm_991ddb8f-bb11-4661-9604-2663fc221fc8/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:05:20 crc 
kubenswrapper[4852]: I1201 21:05:20.232567 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:05:20 crc kubenswrapper[4852]: I1201 21:05:20.232979 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:05:23 crc kubenswrapper[4852]: I1201 21:05:23.077075 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_5724232f-c6e6-4356-b4b2-a622191bedaf/memcached/0.log" Dec 01 21:05:39 crc kubenswrapper[4852]: I1201 21:05:39.285056 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/util/0.log" Dec 01 21:05:39 crc kubenswrapper[4852]: I1201 21:05:39.446615 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/pull/0.log" Dec 01 21:05:39 crc kubenswrapper[4852]: I1201 21:05:39.459496 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/util/0.log" Dec 01 21:05:39 crc kubenswrapper[4852]: I1201 21:05:39.480239 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/pull/0.log" Dec 01 21:05:39 crc kubenswrapper[4852]: I1201 21:05:39.623141 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/extract/0.log" Dec 01 21:05:39 crc kubenswrapper[4852]: I1201 21:05:39.625512 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/pull/0.log" Dec 01 21:05:39 crc kubenswrapper[4852]: I1201 21:05:39.632886 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/util/0.log" Dec 01 21:05:39 crc kubenswrapper[4852]: I1201 21:05:39.848688 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-bd4jr_7d884d8c-acfc-47fe-bee2-f0248f8b0eea/kube-rbac-proxy/0.log" Dec 01 21:05:39 crc kubenswrapper[4852]: I1201 21:05:39.885172 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-2khxz_152c7905-249d-4195-afe6-7b02b5d8267d/kube-rbac-proxy/0.log" Dec 01 21:05:39 crc kubenswrapper[4852]: I1201 21:05:39.900564 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-bd4jr_7d884d8c-acfc-47fe-bee2-f0248f8b0eea/manager/0.log" Dec 01 21:05:40 crc 
kubenswrapper[4852]: I1201 21:05:40.087044 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-2khxz_152c7905-249d-4195-afe6-7b02b5d8267d/manager/0.log" Dec 01 21:05:40 crc kubenswrapper[4852]: I1201 21:05:40.088074 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-gtsjg_714caded-89c7-44a3-a832-2fbaa0e00ac2/kube-rbac-proxy/0.log" Dec 01 21:05:40 crc kubenswrapper[4852]: I1201 21:05:40.122408 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-gtsjg_714caded-89c7-44a3-a832-2fbaa0e00ac2/manager/0.log" Dec 01 21:05:40 crc kubenswrapper[4852]: I1201 21:05:40.401446 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-ccgjt_efb3ff96-731f-4a01-8bed-636717f36fb4/kube-rbac-proxy/0.log" Dec 01 21:05:40 crc kubenswrapper[4852]: I1201 21:05:40.495357 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-ccgjt_efb3ff96-731f-4a01-8bed-636717f36fb4/manager/0.log" Dec 01 21:05:40 crc kubenswrapper[4852]: I1201 21:05:40.573594 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-z7gzm_b9f7343b-9bba-43e0-bb25-80a5f3fb139f/kube-rbac-proxy/0.log" Dec 01 21:05:40 crc kubenswrapper[4852]: I1201 21:05:40.605400 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-z7gzm_b9f7343b-9bba-43e0-bb25-80a5f3fb139f/manager/0.log" Dec 01 21:05:40 crc kubenswrapper[4852]: I1201 21:05:40.650553 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q7fhl_fa3d71fd-60b1-488c-9ae0-abb212b8d0a4/kube-rbac-proxy/0.log" Dec 01 21:05:40 crc kubenswrapper[4852]: I1201 21:05:40.778100 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q7fhl_fa3d71fd-60b1-488c-9ae0-abb212b8d0a4/manager/0.log" Dec 01 21:05:40 crc kubenswrapper[4852]: I1201 21:05:40.830917 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-7w8xr_d2869063-cc44-4cd4-b1f6-5b33a5250e77/kube-rbac-proxy/0.log" Dec 01 21:05:40 crc kubenswrapper[4852]: I1201 21:05:40.994633 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-7w8xr_d2869063-cc44-4cd4-b1f6-5b33a5250e77/manager/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.041350 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-4rphb_b7818ab0-4a52-48fe-a0c8-88d162745762/kube-rbac-proxy/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.041676 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-4rphb_b7818ab0-4a52-48fe-a0c8-88d162745762/manager/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.216299 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-nh9t8_3aa88cab-a21d-40d4-b278-8c006ce138ff/kube-rbac-proxy/0.log" 
Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.273685 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-nh9t8_3aa88cab-a21d-40d4-b278-8c006ce138ff/manager/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.377139 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-lz6m7_4908b6e5-acd8-4754-877f-18a3b8897aa5/manager/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.388910 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-lz6m7_4908b6e5-acd8-4754-877f-18a3b8897aa5/kube-rbac-proxy/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.466433 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-rpswz_6ded5e30-894b-4718-b10e-6cdcf29ea854/kube-rbac-proxy/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.572157 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-rpswz_6ded5e30-894b-4718-b10e-6cdcf29ea854/manager/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.621276 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-tdw8l_2842a3ca-0708-4395-babd-b9dbdc1509d8/kube-rbac-proxy/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.686688 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-tdw8l_2842a3ca-0708-4395-babd-b9dbdc1509d8/manager/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.773200 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-5mktq_4ec2c5f6-679b-4f91-ab45-3eae7b12cd13/kube-rbac-proxy/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.893577 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-5mktq_4ec2c5f6-679b-4f91-ab45-3eae7b12cd13/manager/0.log" Dec 01 21:05:41 crc kubenswrapper[4852]: I1201 21:05:41.956652 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-2phng_cc6a88c3-1e45-470c-ba3b-c15c83afbcec/kube-rbac-proxy/0.log" Dec 01 21:05:42 crc kubenswrapper[4852]: I1201 21:05:42.018574 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-2phng_cc6a88c3-1e45-470c-ba3b-c15c83afbcec/manager/0.log" Dec 01 21:05:42 crc kubenswrapper[4852]: I1201 21:05:42.112443 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb446t9t22_c81cbe79-aa85-4707-a3d6-246bf422575b/kube-rbac-proxy/0.log" Dec 01 21:05:42 crc kubenswrapper[4852]: I1201 21:05:42.165349 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb446t9t22_c81cbe79-aa85-4707-a3d6-246bf422575b/manager/0.log" Dec 01 21:05:42 crc kubenswrapper[4852]: I1201 21:05:42.501721 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6ddddd9d6f-vz4mn_7c160373-5106-41ac-8e58-9ae48e82f5b5/operator/0.log" Dec 01 21:05:42 crc kubenswrapper[4852]: I1201 21:05:42.639486 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-kc7p9_3e4739d8-ba85-4187-9f0b-b51d0c81b8f5/registry-server/0.log" Dec 01 21:05:42 crc kubenswrapper[4852]: I1201 21:05:42.714326 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-ktgl2_e3a2d94e-61fb-406b-be5d-4ae5f0c18fda/kube-rbac-proxy/0.log" Dec 01 21:05:42 crc kubenswrapper[4852]: I1201 21:05:42.894440 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-ktgl2_e3a2d94e-61fb-406b-be5d-4ae5f0c18fda/manager/0.log" Dec 01 21:05:42 crc kubenswrapper[4852]: I1201 21:05:42.905869 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-bzkqb_8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca/kube-rbac-proxy/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.021082 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-bzkqb_8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca/manager/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.099348 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-pzvgc_35e98bd5-b71c-4842-9511-52b5c9d8e25a/operator/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.202865 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-blr2g_268f049d-790e-4b1f-958d-0f07ba335215/kube-rbac-proxy/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.400694 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-656fd97d56-qvhk9_980395ee-3c8d-41a7-9663-7bc33fb4cd46/manager/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.416228 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-rqqkl_28cd4665-305c-4855-87c6-f267402d0b05/kube-rbac-proxy/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.427152 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-blr2g_268f049d-790e-4b1f-958d-0f07ba335215/manager/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.484503 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-rqqkl_28cd4665-305c-4855-87c6-f267402d0b05/manager/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.594287 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-hncmj_7240ba3b-5f4b-4c63-99cf-4fe68d720fb5/kube-rbac-proxy/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.599365 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-hncmj_7240ba3b-5f4b-4c63-99cf-4fe68d720fb5/manager/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.680265 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-lslxr_1ae195c5-0850-4ca5-85e4-abc7ac4d79dc/kube-rbac-proxy/0.log" Dec 01 21:05:43 crc kubenswrapper[4852]: I1201 21:05:43.804731 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-lslxr_1ae195c5-0850-4ca5-85e4-abc7ac4d79dc/manager/0.log" Dec 01 21:05:50 crc kubenswrapper[4852]: I1201 21:05:50.229716 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:05:50 crc kubenswrapper[4852]: I1201 21:05:50.230193 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:05:50 crc kubenswrapper[4852]: I1201 21:05:50.230238 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 21:05:50 crc kubenswrapper[4852]: I1201 21:05:50.230921 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 21:05:50 crc kubenswrapper[4852]: I1201 21:05:50.230988 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" gracePeriod=600 Dec 01 21:05:50 crc kubenswrapper[4852]: E1201 21:05:50.354649 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:05:50 crc kubenswrapper[4852]: I1201 21:05:50.876609 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" exitCode=0 Dec 01 21:05:50 crc kubenswrapper[4852]: I1201 21:05:50.876656 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"} Dec 01 21:05:50 crc kubenswrapper[4852]: I1201 21:05:50.876947 4852 scope.go:117] "RemoveContainer" containerID="1d6e6819cf926b9f130b851943ce2f5c9f5212d1ae5d8a80eff0b7503e1cfd4b" Dec 01 21:05:50 crc kubenswrapper[4852]: I1201 21:05:50.877566 4852 scope.go:117] "RemoveContainer" 
containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:05:50 crc kubenswrapper[4852]: E1201 21:05:50.877892 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:06:03 crc kubenswrapper[4852]: I1201 21:06:03.319650 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:06:03 crc kubenswrapper[4852]: E1201 21:06:03.320480 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:06:03 crc kubenswrapper[4852]: I1201 21:06:03.725461 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-v6j29_08729539-55f5-4d1c-a952-9af42aa77b9c/control-plane-machine-set-operator/0.log" Dec 01 21:06:03 crc kubenswrapper[4852]: I1201 21:06:03.857055 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-tb6ll_e476ff27-7c39-4627-b799-282107cac068/kube-rbac-proxy/0.log" Dec 01 21:06:03 crc kubenswrapper[4852]: I1201 21:06:03.901025 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-tb6ll_e476ff27-7c39-4627-b799-282107cac068/machine-api-operator/0.log" Dec 01 21:06:14 crc kubenswrapper[4852]: I1201 21:06:14.321066 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:06:14 crc kubenswrapper[4852]: E1201 21:06:14.322008 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:06:16 crc kubenswrapper[4852]: I1201 21:06:16.815697 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-q996h_95f06176-4d0e-4c13-ae9b-1f0a4b7f6256/cert-manager-controller/0.log" Dec 01 21:06:16 crc kubenswrapper[4852]: I1201 21:06:16.964692 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-mf8nb_6686a8a5-9086-46eb-a481-5ed17b0e1318/cert-manager-cainjector/0.log" Dec 01 21:06:17 crc kubenswrapper[4852]: I1201 21:06:17.021048 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-twxqd_3155f5b4-1371-40c2-be4a-f099a19001a9/cert-manager-webhook/0.log" Dec 01 21:06:27 crc kubenswrapper[4852]: I1201 21:06:27.320994 4852 scope.go:117] "RemoveContainer" 
containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:06:27 crc kubenswrapper[4852]: E1201 21:06:27.322100 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:06:29 crc kubenswrapper[4852]: I1201 21:06:29.638419 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-vf2l4_f5d7a90a-1755-450a-903a-016f63394e43/nmstate-console-plugin/0.log" Dec 01 21:06:29 crc kubenswrapper[4852]: I1201 21:06:29.825240 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-dxw88_0d3319a8-2dcb-459f-9d3d-6f1eab59ae18/nmstate-handler/0.log" Dec 01 21:06:29 crc kubenswrapper[4852]: I1201 21:06:29.882251 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qdrrg_fe79317d-951e-446a-9ba2-0d272c5bd48c/kube-rbac-proxy/0.log" Dec 01 21:06:29 crc kubenswrapper[4852]: I1201 21:06:29.934339 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qdrrg_fe79317d-951e-446a-9ba2-0d272c5bd48c/nmstate-metrics/0.log" Dec 01 21:06:30 crc kubenswrapper[4852]: I1201 21:06:30.034682 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-jqpds_ce552c22-e8f7-4f0d-a5a3-055dd64a6123/nmstate-operator/0.log" Dec 01 21:06:30 crc kubenswrapper[4852]: I1201 21:06:30.124278 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-sk6kv_4816cba9-dcc7-48c9-b4b6-a41513a2611b/nmstate-webhook/0.log" Dec 01 21:06:39 crc kubenswrapper[4852]: I1201 21:06:39.320389 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:06:39 crc kubenswrapper[4852]: E1201 21:06:39.321094 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:06:44 crc kubenswrapper[4852]: I1201 21:06:44.513602 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-62hbm_29e50547-faa6-4d14-adee-5ea9e0264a42/kube-rbac-proxy/0.log" Dec 01 21:06:44 crc kubenswrapper[4852]: I1201 21:06:44.625437 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-62hbm_29e50547-faa6-4d14-adee-5ea9e0264a42/controller/0.log" Dec 01 21:06:44 crc kubenswrapper[4852]: I1201 21:06:44.772693 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-frr-files/0.log" Dec 01 21:06:44 crc kubenswrapper[4852]: I1201 21:06:44.904541 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-metrics/0.log" 
Dec 01 21:06:44 crc kubenswrapper[4852]: I1201 21:06:44.923772 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-reloader/0.log" Dec 01 21:06:44 crc kubenswrapper[4852]: I1201 21:06:44.951696 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-frr-files/0.log" Dec 01 21:06:44 crc kubenswrapper[4852]: I1201 21:06:44.973762 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-reloader/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.143546 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-frr-files/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.169664 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-metrics/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.202506 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-reloader/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.203664 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-metrics/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.444429 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-reloader/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.449217 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-metrics/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.455399 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-frr-files/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.473148 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/controller/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.617052 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/kube-rbac-proxy/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.663823 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/frr-metrics/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.679435 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/kube-rbac-proxy-frr/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.842984 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/reloader/0.log" Dec 01 21:06:45 crc kubenswrapper[4852]: I1201 21:06:45.910356 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-vwpnc_7b0c5529-861a-4fa1-82f7-72c2463171ee/frr-k8s-webhook-server/0.log" Dec 01 21:06:46 crc kubenswrapper[4852]: I1201 21:06:46.037843 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6ddd45494c-nkr5p_573029ff-5b2f-408d-aa44-da5d6ab202c0/manager/0.log" Dec 01 21:06:46 crc kubenswrapper[4852]: I1201 21:06:46.268693 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-58df767f78-7ndmk_13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e/webhook-server/0.log" Dec 01 21:06:46 crc kubenswrapper[4852]: I1201 21:06:46.340150 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bs8kz_487029c0-a6d2-4f9a-a9d1-d819b22d1279/kube-rbac-proxy/0.log" Dec 01 21:06:46 crc kubenswrapper[4852]: I1201 21:06:46.926657 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bs8kz_487029c0-a6d2-4f9a-a9d1-d819b22d1279/speaker/0.log" Dec 01 21:06:46 crc kubenswrapper[4852]: I1201 21:06:46.998748 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/frr/0.log" Dec 01 21:06:50 crc kubenswrapper[4852]: I1201 21:06:50.335153 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:06:50 crc kubenswrapper[4852]: E1201 21:06:50.336163 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:06:59 crc kubenswrapper[4852]: I1201 21:06:59.439840 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/util/0.log" Dec 01 21:06:59 crc kubenswrapper[4852]: I1201 21:06:59.622613 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/util/0.log" Dec 01 21:06:59 crc kubenswrapper[4852]: I1201 21:06:59.967162 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/pull/0.log" Dec 01 21:06:59 crc kubenswrapper[4852]: I1201 21:06:59.967306 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/pull/0.log" Dec 01 21:06:59 crc kubenswrapper[4852]: I1201 21:06:59.967426 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/pull/0.log" Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.162492 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/util/0.log" Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.190190 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/extract/0.log" Dec 01 21:07:00 crc 
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.192702 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/util/0.log"
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.314618 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/util/0.log"
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.390782 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/pull/0.log"
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.400815 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/pull/0.log"
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.539177 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/pull/0.log"
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.549128 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/extract/0.log"
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.570096 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/util/0.log"
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.725209 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-utilities/0.log"
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.888385 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-utilities/0.log"
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.891933 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-content/0.log"
Dec 01 21:07:00 crc kubenswrapper[4852]: I1201 21:07:00.899312 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-content/0.log"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.059855 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-utilities/0.log"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.077079 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-content/0.log"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.266696 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-utilities/0.log"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.320157 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:07:01 crc kubenswrapper[4852]: E1201 21:07:01.320349 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.417867 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-utilities/0.log"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.463600 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-content/0.log"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.501018 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/registry-server/0.log"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.508237 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-content/0.log"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.660268 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-utilities/0.log"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.758208 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-content/0.log"
Dec 01 21:07:01 crc kubenswrapper[4852]: I1201 21:07:01.911002 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/3.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.104422 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-utilities/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.111350 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/2.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.118818 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/registry-server/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.354142 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-content/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.361895 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-content/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.373741 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-utilities/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.530477 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-utilities/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.538302 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-content/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.626596 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/registry-server/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.693190 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-utilities/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.877027 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-utilities/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.902842 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-content/0.log"
Dec 01 21:07:02 crc kubenswrapper[4852]: I1201 21:07:02.907920 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-content/0.log"
Dec 01 21:07:03 crc kubenswrapper[4852]: I1201 21:07:03.101259 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-utilities/0.log"
Dec 01 21:07:03 crc kubenswrapper[4852]: I1201 21:07:03.106596 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-content/0.log"
Dec 01 21:07:03 crc kubenswrapper[4852]: I1201 21:07:03.458034 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/registry-server/0.log"
Dec 01 21:07:16 crc kubenswrapper[4852]: I1201 21:07:16.320055 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:07:16 crc kubenswrapper[4852]: E1201 21:07:16.320905 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:07:30 crc kubenswrapper[4852]: I1201 21:07:30.326974 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:07:30 crc kubenswrapper[4852]: E1201 21:07:30.327671 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:07:41 crc kubenswrapper[4852]: I1201 21:07:41.322003 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:07:41 crc kubenswrapper[4852]: E1201 21:07:41.323013 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:07:54 crc kubenswrapper[4852]: I1201 21:07:54.320685 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:07:54 crc kubenswrapper[4852]: E1201 21:07:54.323076 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:08:06 crc kubenswrapper[4852]: I1201 21:08:06.334705 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:08:06 crc kubenswrapper[4852]: E1201 21:08:06.335348 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:08:18 crc kubenswrapper[4852]: I1201 21:08:18.320749 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:08:18 crc kubenswrapper[4852]: E1201 21:08:18.321406 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:08:31 crc kubenswrapper[4852]: I1201 21:08:31.320539 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:08:31 crc kubenswrapper[4852]: E1201 21:08:31.321896 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.175989 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lkdpb"]
Dec 01 21:08:40 crc kubenswrapper[4852]: E1201 21:08:40.177287 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05ec4fd7-7d06-43ca-a568-5bc0a2bbde12" containerName="container-00"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.177309 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="05ec4fd7-7d06-43ca-a568-5bc0a2bbde12" containerName="container-00"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.177736 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="05ec4fd7-7d06-43ca-a568-5bc0a2bbde12" containerName="container-00"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.180181 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.200538 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lkdpb"]
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.216114 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-utilities\") pod \"redhat-operators-lkdpb\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") " pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.216628 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-catalog-content\") pod \"redhat-operators-lkdpb\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") " pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.217095 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c77qb\" (UniqueName: \"kubernetes.io/projected/2baca1fa-af34-4cbc-a3bc-83d569c920a9-kube-api-access-c77qb\") pod \"redhat-operators-lkdpb\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") " pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.318514 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-utilities\") pod \"redhat-operators-lkdpb\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") " pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.318614 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-catalog-content\") pod \"redhat-operators-lkdpb\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") " pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.318661 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c77qb\" (UniqueName: \"kubernetes.io/projected/2baca1fa-af34-4cbc-a3bc-83d569c920a9-kube-api-access-c77qb\") pod \"redhat-operators-lkdpb\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") " pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.319367 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-utilities\") pod \"redhat-operators-lkdpb\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") " pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.319476 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-catalog-content\") pod \"redhat-operators-lkdpb\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") " pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.353812 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c77qb\" (UniqueName: \"kubernetes.io/projected/2baca1fa-af34-4cbc-a3bc-83d569c920a9-kube-api-access-c77qb\") pod \"redhat-operators-lkdpb\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") " pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:40 crc kubenswrapper[4852]: I1201 21:08:40.509360 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:41 crc kubenswrapper[4852]: I1201 21:08:41.002181 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lkdpb"]
Dec 01 21:08:41 crc kubenswrapper[4852]: W1201 21:08:41.015936 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2baca1fa_af34_4cbc_a3bc_83d569c920a9.slice/crio-c038f63562f9a736bf7f83fbc9e97707585798383a1808b4802a0c7a85c1e123 WatchSource:0}: Error finding container c038f63562f9a736bf7f83fbc9e97707585798383a1808b4802a0c7a85c1e123: Status 404 returned error can't find the container with id c038f63562f9a736bf7f83fbc9e97707585798383a1808b4802a0c7a85c1e123
Dec 01 21:08:41 crc kubenswrapper[4852]: I1201 21:08:41.691155 4852 generic.go:334] "Generic (PLEG): container finished" podID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerID="abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19" exitCode=0
Dec 01 21:08:41 crc kubenswrapper[4852]: I1201 21:08:41.691277 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkdpb" event={"ID":"2baca1fa-af34-4cbc-a3bc-83d569c920a9","Type":"ContainerDied","Data":"abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19"}
Dec 01 21:08:41 crc kubenswrapper[4852]: I1201 21:08:41.691529 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkdpb" event={"ID":"2baca1fa-af34-4cbc-a3bc-83d569c920a9","Type":"ContainerStarted","Data":"c038f63562f9a736bf7f83fbc9e97707585798383a1808b4802a0c7a85c1e123"}
Dec 01 21:08:41 crc kubenswrapper[4852]: I1201 21:08:41.694959 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 01 21:08:43 crc kubenswrapper[4852]: I1201 21:08:43.725711 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkdpb" event={"ID":"2baca1fa-af34-4cbc-a3bc-83d569c920a9","Type":"ContainerStarted","Data":"caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89"}
Dec 01 21:08:45 crc kubenswrapper[4852]: I1201 21:08:45.320376 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:08:45 crc kubenswrapper[4852]: E1201 21:08:45.320957 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:08:45 crc kubenswrapper[4852]: I1201 21:08:45.750650 4852 generic.go:334] "Generic (PLEG): container finished" podID="78e6d223-0022-426b-9c2c-3f3de97c8c1e" containerID="9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b" exitCode=0
Dec 01 21:08:45 crc kubenswrapper[4852]: I1201 21:08:45.750726 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-29bpp/must-gather-htmvl" event={"ID":"78e6d223-0022-426b-9c2c-3f3de97c8c1e","Type":"ContainerDied","Data":"9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b"}
Dec 01 21:08:45 crc kubenswrapper[4852]: I1201 21:08:45.751519 4852 scope.go:117] "RemoveContainer" containerID="9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b"
Dec 01 21:08:45 crc kubenswrapper[4852]: I1201 21:08:45.753026 4852 generic.go:334] "Generic (PLEG): container finished" podID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerID="caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89" exitCode=0
Dec 01 21:08:45 crc kubenswrapper[4852]: I1201 21:08:45.753079 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkdpb" event={"ID":"2baca1fa-af34-4cbc-a3bc-83d569c920a9","Type":"ContainerDied","Data":"caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89"}
Dec 01 21:08:45 crc kubenswrapper[4852]: I1201 21:08:45.970887 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-29bpp_must-gather-htmvl_78e6d223-0022-426b-9c2c-3f3de97c8c1e/gather/0.log"
Dec 01 21:08:46 crc kubenswrapper[4852]: I1201 21:08:46.773203 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkdpb" event={"ID":"2baca1fa-af34-4cbc-a3bc-83d569c920a9","Type":"ContainerStarted","Data":"225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62"}
Dec 01 21:08:46 crc kubenswrapper[4852]: I1201 21:08:46.808091 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lkdpb" podStartSLOduration=2.23134439 podStartE2EDuration="6.808074618s" podCreationTimestamp="2025-12-01 21:08:40 +0000 UTC" firstStartedPulling="2025-12-01 21:08:41.694575031 +0000 UTC m=+3841.621656468" lastFinishedPulling="2025-12-01 21:08:46.271305269 +0000 UTC m=+3846.198386696" observedRunningTime="2025-12-01 21:08:46.802531965 +0000 UTC m=+3846.729613432" watchObservedRunningTime="2025-12-01 21:08:46.808074618 +0000 UTC m=+3846.735156035"
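The pod_startup_latency_tracker entry above is plain arithmetic over the timestamps it prints: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window (firstStartedPulling to lastFinishedPulling). The sketch below reproduces the logged values up to float rounding:

```go
// Recompute the startup durations from the timestamps logged above for
// openshift-marketplace/redhat-operators-lkdpb.
package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	t, err := time.Parse(time.RFC3339Nano, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-12-01T21:08:40Z")
	firstPull := mustParse("2025-12-01T21:08:41.694575031Z")
	lastPull := mustParse("2025-12-01T21:08:46.271305269Z")
	running := mustParse("2025-12-01T21:08:46.808074618Z") // watchObservedRunningTime

	e2e := running.Sub(created)
	slo := e2e - lastPull.Sub(firstPull) // startup time excluding image pulls
	fmt.Println("podStartE2EDuration:", e2e) // 6.808074618s, as logged
	fmt.Println("podStartSLOduration:", slo) // ~2.23134438s; the tracker logs 2.23134439
}
```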
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.140674 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qlvxn"]
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.143465 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.170227 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qlvxn"]
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.207499 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52kz6\" (UniqueName: \"kubernetes.io/projected/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-kube-api-access-52kz6\") pod \"community-operators-qlvxn\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") " pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.207758 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-catalog-content\") pod \"community-operators-qlvxn\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") " pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.207828 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-utilities\") pod \"community-operators-qlvxn\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") " pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.309846 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52kz6\" (UniqueName: \"kubernetes.io/projected/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-kube-api-access-52kz6\") pod \"community-operators-qlvxn\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") " pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.309903 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-catalog-content\") pod \"community-operators-qlvxn\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") " pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.309999 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-utilities\") pod \"community-operators-qlvxn\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") " pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.310656 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-utilities\") pod \"community-operators-qlvxn\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") " pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.311270 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-catalog-content\") pod \"community-operators-qlvxn\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") " pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.333334 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52kz6\" (UniqueName: \"kubernetes.io/projected/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-kube-api-access-52kz6\") pod \"community-operators-qlvxn\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") " pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.504913 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:49 crc kubenswrapper[4852]: I1201 21:08:49.877696 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qlvxn"]
Dec 01 21:08:49 crc kubenswrapper[4852]: W1201 21:08:49.892307 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78dec9d0_5261_4d3f_ad9d_5f6bc3066257.slice/crio-4ebbbf86c9a7f93ea3ede9a3f7914743f0cf2e4d17ddcbe98e56b8145ee5499a WatchSource:0}: Error finding container 4ebbbf86c9a7f93ea3ede9a3f7914743f0cf2e4d17ddcbe98e56b8145ee5499a: Status 404 returned error can't find the container with id 4ebbbf86c9a7f93ea3ede9a3f7914743f0cf2e4d17ddcbe98e56b8145ee5499a
Dec 01 21:08:50 crc kubenswrapper[4852]: I1201 21:08:50.510918 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:50 crc kubenswrapper[4852]: I1201 21:08:50.512015 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:08:50 crc kubenswrapper[4852]: I1201 21:08:50.820030 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlvxn" event={"ID":"78dec9d0-5261-4d3f-ad9d-5f6bc3066257","Type":"ContainerStarted","Data":"e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f"}
Dec 01 21:08:50 crc kubenswrapper[4852]: I1201 21:08:50.820096 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlvxn" event={"ID":"78dec9d0-5261-4d3f-ad9d-5f6bc3066257","Type":"ContainerStarted","Data":"4ebbbf86c9a7f93ea3ede9a3f7914743f0cf2e4d17ddcbe98e56b8145ee5499a"}
Dec 01 21:08:51 crc kubenswrapper[4852]: I1201 21:08:51.560201 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lkdpb" podUID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerName="registry-server" probeResult="failure" output=<
Dec 01 21:08:51 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s
Dec 01 21:08:51 crc kubenswrapper[4852]: >
Dec 01 21:08:51 crc kubenswrapper[4852]: I1201 21:08:51.831088 4852 generic.go:334] "Generic (PLEG): container finished" podID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerID="e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f" exitCode=0
Dec 01 21:08:51 crc kubenswrapper[4852]: I1201 21:08:51.831148 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlvxn" event={"ID":"78dec9d0-5261-4d3f-ad9d-5f6bc3066257","Type":"ContainerDied","Data":"e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f"}
Dec 01 21:08:52 crc kubenswrapper[4852]: I1201 21:08:52.843781 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlvxn" event={"ID":"78dec9d0-5261-4d3f-ad9d-5f6bc3066257","Type":"ContainerStarted","Data":"9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c"}
Dec 01 21:08:53 crc kubenswrapper[4852]: I1201 21:08:53.855738 4852 generic.go:334] "Generic (PLEG): container finished" podID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerID="9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c" exitCode=0
Dec 01 21:08:53 crc kubenswrapper[4852]: I1201 21:08:53.855784 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlvxn" event={"ID":"78dec9d0-5261-4d3f-ad9d-5f6bc3066257","Type":"ContainerDied","Data":"9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c"}
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.118901 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-29bpp/must-gather-htmvl"]
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.119207 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-29bpp/must-gather-htmvl" podUID="78e6d223-0022-426b-9c2c-3f3de97c8c1e" containerName="copy" containerID="cri-o://0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c" gracePeriod=2
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.127045 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-29bpp/must-gather-htmvl"]
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.589889 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-29bpp_must-gather-htmvl_78e6d223-0022-426b-9c2c-3f3de97c8c1e/copy/0.log"
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.590941 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-29bpp/must-gather-htmvl"
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.716020 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/78e6d223-0022-426b-9c2c-3f3de97c8c1e-must-gather-output\") pod \"78e6d223-0022-426b-9c2c-3f3de97c8c1e\" (UID: \"78e6d223-0022-426b-9c2c-3f3de97c8c1e\") "
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.716076 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2x29\" (UniqueName: \"kubernetes.io/projected/78e6d223-0022-426b-9c2c-3f3de97c8c1e-kube-api-access-h2x29\") pod \"78e6d223-0022-426b-9c2c-3f3de97c8c1e\" (UID: \"78e6d223-0022-426b-9c2c-3f3de97c8c1e\") "
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.726635 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78e6d223-0022-426b-9c2c-3f3de97c8c1e-kube-api-access-h2x29" (OuterVolumeSpecName: "kube-api-access-h2x29") pod "78e6d223-0022-426b-9c2c-3f3de97c8c1e" (UID: "78e6d223-0022-426b-9c2c-3f3de97c8c1e"). InnerVolumeSpecName "kube-api-access-h2x29". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.818204 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2x29\" (UniqueName: \"kubernetes.io/projected/78e6d223-0022-426b-9c2c-3f3de97c8c1e-kube-api-access-h2x29\") on node \"crc\" DevicePath \"\""
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.867413 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlvxn" event={"ID":"78dec9d0-5261-4d3f-ad9d-5f6bc3066257","Type":"ContainerStarted","Data":"2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd"}
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.869224 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-29bpp_must-gather-htmvl_78e6d223-0022-426b-9c2c-3f3de97c8c1e/copy/0.log"
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.869699 4852 generic.go:334] "Generic (PLEG): container finished" podID="78e6d223-0022-426b-9c2c-3f3de97c8c1e" containerID="0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c" exitCode=143
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.869749 4852 scope.go:117] "RemoveContainer" containerID="0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c"
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.869905 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-29bpp/must-gather-htmvl"
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.870004 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78e6d223-0022-426b-9c2c-3f3de97c8c1e-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "78e6d223-0022-426b-9c2c-3f3de97c8c1e" (UID: "78e6d223-0022-426b-9c2c-3f3de97c8c1e"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.889603 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qlvxn" podStartSLOduration=3.314379882 podStartE2EDuration="5.889581888s" podCreationTimestamp="2025-12-01 21:08:49 +0000 UTC" firstStartedPulling="2025-12-01 21:08:51.839388365 +0000 UTC m=+3851.766469792" lastFinishedPulling="2025-12-01 21:08:54.414590381 +0000 UTC m=+3854.341671798" observedRunningTime="2025-12-01 21:08:54.884720786 +0000 UTC m=+3854.811802193" watchObservedRunningTime="2025-12-01 21:08:54.889581888 +0000 UTC m=+3854.816663305"
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.894863 4852 scope.go:117] "RemoveContainer" containerID="9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b"
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.920418 4852 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/78e6d223-0022-426b-9c2c-3f3de97c8c1e-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.973730 4852 scope.go:117] "RemoveContainer" containerID="0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c"
Dec 01 21:08:54 crc kubenswrapper[4852]: E1201 21:08:54.976880 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c\": container with ID starting with 0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c not found: ID does not exist" containerID="0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c"
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.976922 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c"} err="failed to get container status \"0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c\": rpc error: code = NotFound desc = could not find container \"0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c\": container with ID starting with 0ed9d11a38801af0ea1445cb7016909e9dbb766f883caaba84c084382e62a78c not found: ID does not exist"
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.976949 4852 scope.go:117] "RemoveContainer" containerID="9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b"
Dec 01 21:08:54 crc kubenswrapper[4852]: E1201 21:08:54.977504 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b\": container with ID starting with 9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b not found: ID does not exist" containerID="9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b"
Dec 01 21:08:54 crc kubenswrapper[4852]: I1201 21:08:54.977529 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b"} err="failed to get container status \"9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b\": rpc error: code = NotFound desc = could not find container \"9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b\": container with ID starting with 9ef98cdc09e0171bb80822db9506aab94390b8daf44196704a5fd730fdf0a97b not found: ID does not exist"
Dec 01 21:08:56 crc kubenswrapper[4852]: I1201 21:08:56.335342 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78e6d223-0022-426b-9c2c-3f3de97c8c1e" path="/var/lib/kubelet/pods/78e6d223-0022-426b-9c2c-3f3de97c8c1e/volumes"
Dec 01 21:08:59 crc kubenswrapper[4852]: I1201 21:08:59.505687 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:59 crc kubenswrapper[4852]: I1201 21:08:59.506179 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:59 crc kubenswrapper[4852]: I1201 21:08:59.549182 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:08:59 crc kubenswrapper[4852]: I1201 21:08:59.976072 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:09:00 crc kubenswrapper[4852]: I1201 21:09:00.029016 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qlvxn"]
Dec 01 21:09:00 crc kubenswrapper[4852]: I1201 21:09:00.337876 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:09:00 crc kubenswrapper[4852]: E1201 21:09:00.338413 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:09:00 crc kubenswrapper[4852]: I1201 21:09:00.587644 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:09:00 crc kubenswrapper[4852]: I1201 21:09:00.645263 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:09:01 crc kubenswrapper[4852]: I1201 21:09:01.938219 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qlvxn" podUID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerName="registry-server" containerID="cri-o://2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd" gracePeriod=2
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.191462 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lkdpb"]
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.191736 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lkdpb" podUID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerName="registry-server" containerID="cri-o://225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62" gracePeriod=2
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.439693 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qlvxn"
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.579621 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-utilities\") pod \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") "
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.579871 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-catalog-content\") pod \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") "
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.580071 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52kz6\" (UniqueName: \"kubernetes.io/projected/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-kube-api-access-52kz6\") pod \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\" (UID: \"78dec9d0-5261-4d3f-ad9d-5f6bc3066257\") "
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.580683 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-utilities" (OuterVolumeSpecName: "utilities") pod "78dec9d0-5261-4d3f-ad9d-5f6bc3066257" (UID: "78dec9d0-5261-4d3f-ad9d-5f6bc3066257"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.584414 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-kube-api-access-52kz6" (OuterVolumeSpecName: "kube-api-access-52kz6") pod "78dec9d0-5261-4d3f-ad9d-5f6bc3066257" (UID: "78dec9d0-5261-4d3f-ad9d-5f6bc3066257"). InnerVolumeSpecName "kube-api-access-52kz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.640964 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkdpb"
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.643536 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78dec9d0-5261-4d3f-ad9d-5f6bc3066257" (UID: "78dec9d0-5261-4d3f-ad9d-5f6bc3066257"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.682412 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.682451 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.682487 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52kz6\" (UniqueName: \"kubernetes.io/projected/78dec9d0-5261-4d3f-ad9d-5f6bc3066257-kube-api-access-52kz6\") on node \"crc\" DevicePath \"\""
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.784440 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-utilities\") pod \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") "
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.784604 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c77qb\" (UniqueName: \"kubernetes.io/projected/2baca1fa-af34-4cbc-a3bc-83d569c920a9-kube-api-access-c77qb\") pod \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") "
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.784799 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-catalog-content\") pod \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\" (UID: \"2baca1fa-af34-4cbc-a3bc-83d569c920a9\") "
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.786059 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-utilities" (OuterVolumeSpecName: "utilities") pod "2baca1fa-af34-4cbc-a3bc-83d569c920a9" (UID: "2baca1fa-af34-4cbc-a3bc-83d569c920a9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.788939 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2baca1fa-af34-4cbc-a3bc-83d569c920a9-kube-api-access-c77qb" (OuterVolumeSpecName: "kube-api-access-c77qb") pod "2baca1fa-af34-4cbc-a3bc-83d569c920a9" (UID: "2baca1fa-af34-4cbc-a3bc-83d569c920a9"). InnerVolumeSpecName "kube-api-access-c77qb".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.887564 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.887814 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c77qb\" (UniqueName: \"kubernetes.io/projected/2baca1fa-af34-4cbc-a3bc-83d569c920a9-kube-api-access-c77qb\") on node \"crc\" DevicePath \"\"" Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.953413 4852 generic.go:334] "Generic (PLEG): container finished" podID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerID="2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd" exitCode=0 Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.953488 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlvxn" event={"ID":"78dec9d0-5261-4d3f-ad9d-5f6bc3066257","Type":"ContainerDied","Data":"2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd"} Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.953585 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qlvxn" Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.954594 4852 scope.go:117] "RemoveContainer" containerID="2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd" Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.954489 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlvxn" event={"ID":"78dec9d0-5261-4d3f-ad9d-5f6bc3066257","Type":"ContainerDied","Data":"4ebbbf86c9a7f93ea3ede9a3f7914743f0cf2e4d17ddcbe98e56b8145ee5499a"} Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.958903 4852 generic.go:334] "Generic (PLEG): container finished" podID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerID="225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62" exitCode=0 Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.958973 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkdpb" event={"ID":"2baca1fa-af34-4cbc-a3bc-83d569c920a9","Type":"ContainerDied","Data":"225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62"} Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.959017 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkdpb" event={"ID":"2baca1fa-af34-4cbc-a3bc-83d569c920a9","Type":"ContainerDied","Data":"c038f63562f9a736bf7f83fbc9e97707585798383a1808b4802a0c7a85c1e123"} Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.959115 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkdpb" Dec 01 21:09:02 crc kubenswrapper[4852]: I1201 21:09:02.974339 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2baca1fa-af34-4cbc-a3bc-83d569c920a9" (UID: "2baca1fa-af34-4cbc-a3bc-83d569c920a9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:02.995028 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2baca1fa-af34-4cbc-a3bc-83d569c920a9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.003002 4852 scope.go:117] "RemoveContainer" containerID="9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c" Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.011920 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qlvxn"] Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.019057 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qlvxn"] Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.032490 4852 scope.go:117] "RemoveContainer" containerID="e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f" Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.060437 4852 scope.go:117] "RemoveContainer" containerID="2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd" Dec 01 21:09:03 crc kubenswrapper[4852]: E1201 21:09:03.060859 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd\": container with ID starting with 2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd not found: ID does not exist" containerID="2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd" Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.060895 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd"} err="failed to get container status \"2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd\": rpc error: code = NotFound desc = could not find container \"2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd\": container with ID starting with 2ac738a19924c43abf5d953db0bfced8c9355ae053399868afbcd175bd32addd not found: ID does not exist" Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.060918 4852 scope.go:117] "RemoveContainer" containerID="9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c" Dec 01 21:09:03 crc kubenswrapper[4852]: E1201 21:09:03.061130 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c\": container with ID starting with 9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c not found: ID does not exist" containerID="9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c" Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.061154 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c"} err="failed to get container status \"9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c\": rpc error: code = NotFound desc = could not find container \"9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c\": container with ID starting with 9642fa9f7f38dbf5581ed16ee084e1b86e22c8c67d09992aeda1b0737eb0035c not found: ID does not exist" Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 
Dec 01 21:09:03 crc kubenswrapper[4852]: E1201 21:09:03.061338 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f\": container with ID starting with e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f not found: ID does not exist" containerID="e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.061359 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f"} err="failed to get container status \"e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f\": rpc error: code = NotFound desc = could not find container \"e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f\": container with ID starting with e237df2dbb19251a9537efa69512d5739d945dd377898790441759a3c3314e5f not found: ID does not exist"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.061371 4852 scope.go:117] "RemoveContainer" containerID="225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.104970 4852 scope.go:117] "RemoveContainer" containerID="caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.124936 4852 scope.go:117] "RemoveContainer" containerID="abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.207572 4852 scope.go:117] "RemoveContainer" containerID="225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62"
Dec 01 21:09:03 crc kubenswrapper[4852]: E1201 21:09:03.208053 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62\": container with ID starting with 225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62 not found: ID does not exist" containerID="225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.208095 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62"} err="failed to get container status \"225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62\": rpc error: code = NotFound desc = could not find container \"225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62\": container with ID starting with 225f875bed3a067b44ab987c8cf91240f9b3b32d8e5a0b5bd5edfa0efc5a3b62 not found: ID does not exist"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.208123 4852 scope.go:117] "RemoveContainer" containerID="caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89"
Dec 01 21:09:03 crc kubenswrapper[4852]: E1201 21:09:03.208405 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89\": container with ID starting with caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89 not found: ID does not exist" containerID="caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.208434 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89"} err="failed to get container status \"caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89\": rpc error: code = NotFound desc = could not find container \"caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89\": container with ID starting with caf40e0643c869280cae3cca223a74b6ec55a856a7cbdc8da521e09851150c89 not found: ID does not exist"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.208459 4852 scope.go:117] "RemoveContainer" containerID="abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19"
Dec 01 21:09:03 crc kubenswrapper[4852]: E1201 21:09:03.208668 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19\": container with ID starting with abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19 not found: ID does not exist" containerID="abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.208695 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19"} err="failed to get container status \"abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19\": rpc error: code = NotFound desc = could not find container \"abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19\": container with ID starting with abfcb090d1a213043282f2519f83344ff15056ad82b61897c2459886349b4c19 not found: ID does not exist"
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.289805 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lkdpb"]
Dec 01 21:09:03 crc kubenswrapper[4852]: I1201 21:09:03.297447 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lkdpb"]
Dec 01 21:09:04 crc kubenswrapper[4852]: I1201 21:09:04.338799 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" path="/var/lib/kubelet/pods/2baca1fa-af34-4cbc-a3bc-83d569c920a9/volumes"
Dec 01 21:09:04 crc kubenswrapper[4852]: I1201 21:09:04.339740 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" path="/var/lib/kubelet/pods/78dec9d0-5261-4d3f-ad9d-5f6bc3066257/volumes"
Dec 01 21:09:11 crc kubenswrapper[4852]: I1201 21:09:11.320501 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:09:11 crc kubenswrapper[4852]: E1201 21:09:11.321240 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"
Dec 01 21:09:24 crc kubenswrapper[4852]: I1201 21:09:24.320632 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:09:24 crc kubenswrapper[4852]: E1201 21:09:24.321589 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:09:36 crc kubenswrapper[4852]: I1201 21:09:36.320196 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:09:36 crc kubenswrapper[4852]: E1201 21:09:36.321430 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:09:48 crc kubenswrapper[4852]: I1201 21:09:48.321301 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:09:48 crc kubenswrapper[4852]: E1201 21:09:48.322552 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:09:59 crc kubenswrapper[4852]: I1201 21:09:59.320533 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:09:59 crc kubenswrapper[4852]: E1201 21:09:59.321699 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:10:10 crc kubenswrapper[4852]: I1201 21:10:10.330126 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:10:10 crc kubenswrapper[4852]: E1201 21:10:10.331386 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:10:21 crc kubenswrapper[4852]: I1201 21:10:21.319988 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:10:21 crc kubenswrapper[4852]: E1201 21:10:21.320593 4852 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:10:33 crc kubenswrapper[4852]: I1201 21:10:33.320634 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:10:33 crc kubenswrapper[4852]: E1201 21:10:33.322024 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:10:44 crc kubenswrapper[4852]: I1201 21:10:44.320300 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:10:44 crc kubenswrapper[4852]: E1201 21:10:44.321071 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:10:58 crc kubenswrapper[4852]: I1201 21:10:58.320359 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613" Dec 01 21:10:58 crc kubenswrapper[4852]: I1201 21:10:58.447704 4852 scope.go:117] "RemoveContainer" containerID="6e5719231e69f4dd84bffcbf352f785c398ca500bf15bcd2888a0766d70b2903" Dec 01 21:10:59 crc kubenswrapper[4852]: I1201 21:10:59.414213 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"16152c8642f1b4c636ab5a6c74e7e424698b8e6dbec3d600336e8d1ba31f137f"} Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.639470 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cmvgg"] Dec 01 21:11:33 crc kubenswrapper[4852]: E1201 21:11:33.649401 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78e6d223-0022-426b-9c2c-3f3de97c8c1e" containerName="copy" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649424 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="78e6d223-0022-426b-9c2c-3f3de97c8c1e" containerName="copy" Dec 01 21:11:33 crc kubenswrapper[4852]: E1201 21:11:33.649434 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerName="extract-content" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649442 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerName="extract-content" Dec 01 21:11:33 crc kubenswrapper[4852]: E1201 21:11:33.649475 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" 
containerName="registry-server" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649483 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerName="registry-server" Dec 01 21:11:33 crc kubenswrapper[4852]: E1201 21:11:33.649499 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerName="extract-utilities" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649506 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerName="extract-utilities" Dec 01 21:11:33 crc kubenswrapper[4852]: E1201 21:11:33.649526 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerName="extract-content" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649532 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerName="extract-content" Dec 01 21:11:33 crc kubenswrapper[4852]: E1201 21:11:33.649556 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78e6d223-0022-426b-9c2c-3f3de97c8c1e" containerName="gather" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649563 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="78e6d223-0022-426b-9c2c-3f3de97c8c1e" containerName="gather" Dec 01 21:11:33 crc kubenswrapper[4852]: E1201 21:11:33.649579 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerName="registry-server" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649586 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerName="registry-server" Dec 01 21:11:33 crc kubenswrapper[4852]: E1201 21:11:33.649613 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerName="extract-utilities" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649621 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerName="extract-utilities" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649831 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="78e6d223-0022-426b-9c2c-3f3de97c8c1e" containerName="copy" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649862 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="2baca1fa-af34-4cbc-a3bc-83d569c920a9" containerName="registry-server" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649878 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="78e6d223-0022-426b-9c2c-3f3de97c8c1e" containerName="gather" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.649890 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="78dec9d0-5261-4d3f-ad9d-5f6bc3066257" containerName="registry-server" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.651692 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.654209 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cmvgg"] Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.826519 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-catalog-content\") pod \"certified-operators-cmvgg\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.826658 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-utilities\") pod \"certified-operators-cmvgg\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.826740 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddsdh\" (UniqueName: \"kubernetes.io/projected/5389dfb0-c97c-422a-9e16-65c1ebb8c508-kube-api-access-ddsdh\") pod \"certified-operators-cmvgg\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.928869 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-utilities\") pod \"certified-operators-cmvgg\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.928982 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddsdh\" (UniqueName: \"kubernetes.io/projected/5389dfb0-c97c-422a-9e16-65c1ebb8c508-kube-api-access-ddsdh\") pod \"certified-operators-cmvgg\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.929055 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-catalog-content\") pod \"certified-operators-cmvgg\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.929655 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-catalog-content\") pod \"certified-operators-cmvgg\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.929923 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-utilities\") pod \"certified-operators-cmvgg\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.953770 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ddsdh\" (UniqueName: \"kubernetes.io/projected/5389dfb0-c97c-422a-9e16-65c1ebb8c508-kube-api-access-ddsdh\") pod \"certified-operators-cmvgg\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:33 crc kubenswrapper[4852]: I1201 21:11:33.977153 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:34 crc kubenswrapper[4852]: I1201 21:11:34.491799 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cmvgg"] Dec 01 21:11:34 crc kubenswrapper[4852]: I1201 21:11:34.820864 4852 generic.go:334] "Generic (PLEG): container finished" podID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerID="818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3" exitCode=0 Dec 01 21:11:34 crc kubenswrapper[4852]: I1201 21:11:34.821277 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmvgg" event={"ID":"5389dfb0-c97c-422a-9e16-65c1ebb8c508","Type":"ContainerDied","Data":"818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3"} Dec 01 21:11:34 crc kubenswrapper[4852]: I1201 21:11:34.821325 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmvgg" event={"ID":"5389dfb0-c97c-422a-9e16-65c1ebb8c508","Type":"ContainerStarted","Data":"7e298a2cf10a2898176f4d9405a83847e7c84b9f9d1fd7f08e87e140b2050d91"} Dec 01 21:11:35 crc kubenswrapper[4852]: I1201 21:11:35.836389 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmvgg" event={"ID":"5389dfb0-c97c-422a-9e16-65c1ebb8c508","Type":"ContainerStarted","Data":"3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f"} Dec 01 21:11:36 crc kubenswrapper[4852]: I1201 21:11:36.853580 4852 generic.go:334] "Generic (PLEG): container finished" podID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerID="3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f" exitCode=0 Dec 01 21:11:36 crc kubenswrapper[4852]: I1201 21:11:36.853849 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmvgg" event={"ID":"5389dfb0-c97c-422a-9e16-65c1ebb8c508","Type":"ContainerDied","Data":"3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f"} Dec 01 21:11:37 crc kubenswrapper[4852]: I1201 21:11:37.871110 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmvgg" event={"ID":"5389dfb0-c97c-422a-9e16-65c1ebb8c508","Type":"ContainerStarted","Data":"d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c"} Dec 01 21:11:37 crc kubenswrapper[4852]: I1201 21:11:37.897413 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cmvgg" podStartSLOduration=2.31742263 podStartE2EDuration="4.897385124s" podCreationTimestamp="2025-12-01 21:11:33 +0000 UTC" firstStartedPulling="2025-12-01 21:11:34.824970357 +0000 UTC m=+4014.752051804" lastFinishedPulling="2025-12-01 21:11:37.404932841 +0000 UTC m=+4017.332014298" observedRunningTime="2025-12-01 21:11:37.896106214 +0000 UTC m=+4017.823187641" watchObservedRunningTime="2025-12-01 21:11:37.897385124 +0000 UTC m=+4017.824466551" Dec 01 21:11:41 crc kubenswrapper[4852]: I1201 21:11:41.799292 4852 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-must-gather-85nxj/must-gather-bmzf4"] Dec 01 21:11:41 crc kubenswrapper[4852]: I1201 21:11:41.808431 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85nxj/must-gather-bmzf4" Dec 01 21:11:41 crc kubenswrapper[4852]: I1201 21:11:41.812039 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-85nxj"/"default-dockercfg-6bkvh" Dec 01 21:11:41 crc kubenswrapper[4852]: I1201 21:11:41.812342 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-85nxj"/"kube-root-ca.crt" Dec 01 21:11:41 crc kubenswrapper[4852]: I1201 21:11:41.813846 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-85nxj"/"openshift-service-ca.crt" Dec 01 21:11:41 crc kubenswrapper[4852]: I1201 21:11:41.828152 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-85nxj/must-gather-bmzf4"] Dec 01 21:11:41 crc kubenswrapper[4852]: I1201 21:11:41.908502 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzq6j\" (UniqueName: \"kubernetes.io/projected/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-kube-api-access-dzq6j\") pod \"must-gather-bmzf4\" (UID: \"b41cb2be-dc82-4acc-a3fa-e0110c3934e1\") " pod="openshift-must-gather-85nxj/must-gather-bmzf4" Dec 01 21:11:41 crc kubenswrapper[4852]: I1201 21:11:41.909124 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-must-gather-output\") pod \"must-gather-bmzf4\" (UID: \"b41cb2be-dc82-4acc-a3fa-e0110c3934e1\") " pod="openshift-must-gather-85nxj/must-gather-bmzf4" Dec 01 21:11:42 crc kubenswrapper[4852]: I1201 21:11:42.012200 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-must-gather-output\") pod \"must-gather-bmzf4\" (UID: \"b41cb2be-dc82-4acc-a3fa-e0110c3934e1\") " pod="openshift-must-gather-85nxj/must-gather-bmzf4" Dec 01 21:11:42 crc kubenswrapper[4852]: I1201 21:11:42.012864 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-must-gather-output\") pod \"must-gather-bmzf4\" (UID: \"b41cb2be-dc82-4acc-a3fa-e0110c3934e1\") " pod="openshift-must-gather-85nxj/must-gather-bmzf4" Dec 01 21:11:42 crc kubenswrapper[4852]: I1201 21:11:42.013167 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzq6j\" (UniqueName: \"kubernetes.io/projected/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-kube-api-access-dzq6j\") pod \"must-gather-bmzf4\" (UID: \"b41cb2be-dc82-4acc-a3fa-e0110c3934e1\") " pod="openshift-must-gather-85nxj/must-gather-bmzf4" Dec 01 21:11:42 crc kubenswrapper[4852]: I1201 21:11:42.032324 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzq6j\" (UniqueName: \"kubernetes.io/projected/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-kube-api-access-dzq6j\") pod \"must-gather-bmzf4\" (UID: \"b41cb2be-dc82-4acc-a3fa-e0110c3934e1\") " pod="openshift-must-gather-85nxj/must-gather-bmzf4" Dec 01 21:11:42 crc kubenswrapper[4852]: I1201 21:11:42.132316 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/must-gather-bmzf4" Dec 01 21:11:42 crc kubenswrapper[4852]: I1201 21:11:42.621438 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-85nxj/must-gather-bmzf4"] Dec 01 21:11:42 crc kubenswrapper[4852]: I1201 21:11:42.941235 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/must-gather-bmzf4" event={"ID":"b41cb2be-dc82-4acc-a3fa-e0110c3934e1","Type":"ContainerStarted","Data":"7ef284d6d2258183d816c5902a8cbaa6f5166b3df87dd9c5fdce24087b42a9b6"} Dec 01 21:11:43 crc kubenswrapper[4852]: I1201 21:11:43.964424 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/must-gather-bmzf4" event={"ID":"b41cb2be-dc82-4acc-a3fa-e0110c3934e1","Type":"ContainerStarted","Data":"b82de4c811b7e344cc555a464757b68d18bfb4582810f575a2a881449af77d2c"} Dec 01 21:11:43 crc kubenswrapper[4852]: I1201 21:11:43.964982 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/must-gather-bmzf4" event={"ID":"b41cb2be-dc82-4acc-a3fa-e0110c3934e1","Type":"ContainerStarted","Data":"aedb19257f37274aa00b07832c465df5437883a1128edc93212e59a6b68ba127"} Dec 01 21:11:43 crc kubenswrapper[4852]: I1201 21:11:43.977767 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:43 crc kubenswrapper[4852]: I1201 21:11:43.980970 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:44 crc kubenswrapper[4852]: I1201 21:11:44.007360 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-85nxj/must-gather-bmzf4" podStartSLOduration=3.007331098 podStartE2EDuration="3.007331098s" podCreationTimestamp="2025-12-01 21:11:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 21:11:43.990920114 +0000 UTC m=+4023.918001571" watchObservedRunningTime="2025-12-01 21:11:44.007331098 +0000 UTC m=+4023.934412545" Dec 01 21:11:44 crc kubenswrapper[4852]: I1201 21:11:44.056343 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:45 crc kubenswrapper[4852]: I1201 21:11:45.035566 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:45 crc kubenswrapper[4852]: I1201 21:11:45.094855 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cmvgg"] Dec 01 21:11:46 crc kubenswrapper[4852]: I1201 21:11:46.622391 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-85nxj/crc-debug-sj6th"] Dec 01 21:11:46 crc kubenswrapper[4852]: I1201 21:11:46.624867 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-sj6th" Dec 01 21:11:46 crc kubenswrapper[4852]: I1201 21:11:46.706521 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x5bp\" (UniqueName: \"kubernetes.io/projected/c0c066ed-6220-472e-a312-bba1b8439754-kube-api-access-4x5bp\") pod \"crc-debug-sj6th\" (UID: \"c0c066ed-6220-472e-a312-bba1b8439754\") " pod="openshift-must-gather-85nxj/crc-debug-sj6th" Dec 01 21:11:46 crc kubenswrapper[4852]: I1201 21:11:46.706581 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c0c066ed-6220-472e-a312-bba1b8439754-host\") pod \"crc-debug-sj6th\" (UID: \"c0c066ed-6220-472e-a312-bba1b8439754\") " pod="openshift-must-gather-85nxj/crc-debug-sj6th" Dec 01 21:11:46 crc kubenswrapper[4852]: I1201 21:11:46.807884 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x5bp\" (UniqueName: \"kubernetes.io/projected/c0c066ed-6220-472e-a312-bba1b8439754-kube-api-access-4x5bp\") pod \"crc-debug-sj6th\" (UID: \"c0c066ed-6220-472e-a312-bba1b8439754\") " pod="openshift-must-gather-85nxj/crc-debug-sj6th" Dec 01 21:11:46 crc kubenswrapper[4852]: I1201 21:11:46.807937 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c0c066ed-6220-472e-a312-bba1b8439754-host\") pod \"crc-debug-sj6th\" (UID: \"c0c066ed-6220-472e-a312-bba1b8439754\") " pod="openshift-must-gather-85nxj/crc-debug-sj6th" Dec 01 21:11:46 crc kubenswrapper[4852]: I1201 21:11:46.808084 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c0c066ed-6220-472e-a312-bba1b8439754-host\") pod \"crc-debug-sj6th\" (UID: \"c0c066ed-6220-472e-a312-bba1b8439754\") " pod="openshift-must-gather-85nxj/crc-debug-sj6th" Dec 01 21:11:46 crc kubenswrapper[4852]: I1201 21:11:46.828556 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x5bp\" (UniqueName: \"kubernetes.io/projected/c0c066ed-6220-472e-a312-bba1b8439754-kube-api-access-4x5bp\") pod \"crc-debug-sj6th\" (UID: \"c0c066ed-6220-472e-a312-bba1b8439754\") " pod="openshift-must-gather-85nxj/crc-debug-sj6th" Dec 01 21:11:46 crc kubenswrapper[4852]: I1201 21:11:46.942601 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-sj6th" Dec 01 21:11:46 crc kubenswrapper[4852]: W1201 21:11:46.984124 4852 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0c066ed_6220_472e_a312_bba1b8439754.slice/crio-55429a5fa0acadf802d140234b078d3f6d47718f026b4464c0f36eccd2e35250 WatchSource:0}: Error finding container 55429a5fa0acadf802d140234b078d3f6d47718f026b4464c0f36eccd2e35250: Status 404 returned error can't find the container with id 55429a5fa0acadf802d140234b078d3f6d47718f026b4464c0f36eccd2e35250 Dec 01 21:11:47 crc kubenswrapper[4852]: I1201 21:11:47.000156 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/crc-debug-sj6th" event={"ID":"c0c066ed-6220-472e-a312-bba1b8439754","Type":"ContainerStarted","Data":"55429a5fa0acadf802d140234b078d3f6d47718f026b4464c0f36eccd2e35250"} Dec 01 21:11:47 crc kubenswrapper[4852]: I1201 21:11:47.000389 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cmvgg" podUID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerName="registry-server" containerID="cri-o://d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c" gracePeriod=2 Dec 01 21:11:47 crc kubenswrapper[4852]: I1201 21:11:47.991426 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.025566 4852 generic.go:334] "Generic (PLEG): container finished" podID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerID="d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c" exitCode=0 Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.025623 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmvgg" event={"ID":"5389dfb0-c97c-422a-9e16-65c1ebb8c508","Type":"ContainerDied","Data":"d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c"} Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.025646 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmvgg" event={"ID":"5389dfb0-c97c-422a-9e16-65c1ebb8c508","Type":"ContainerDied","Data":"7e298a2cf10a2898176f4d9405a83847e7c84b9f9d1fd7f08e87e140b2050d91"} Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.025663 4852 scope.go:117] "RemoveContainer" containerID="d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.025886 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cmvgg" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.040052 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/crc-debug-sj6th" event={"ID":"c0c066ed-6220-472e-a312-bba1b8439754","Type":"ContainerStarted","Data":"0380b97c5f8cc85996fce088ad9c2990f9e78f06923fd596778aeecd2893e209"} Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.062708 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-85nxj/crc-debug-sj6th" podStartSLOduration=2.062684009 podStartE2EDuration="2.062684009s" podCreationTimestamp="2025-12-01 21:11:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 21:11:48.052837981 +0000 UTC m=+4027.979919398" watchObservedRunningTime="2025-12-01 21:11:48.062684009 +0000 UTC m=+4027.989765426" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.069661 4852 scope.go:117] "RemoveContainer" containerID="3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.102710 4852 scope.go:117] "RemoveContainer" containerID="818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.126393 4852 scope.go:117] "RemoveContainer" containerID="d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c" Dec 01 21:11:48 crc kubenswrapper[4852]: E1201 21:11:48.129857 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c\": container with ID starting with d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c not found: ID does not exist" containerID="d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.129887 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c"} err="failed to get container status \"d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c\": rpc error: code = NotFound desc = could not find container \"d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c\": container with ID starting with d298bc60c70d18295fbc17b46cedc84229736371651c98e26ac99e193dde6f0c not found: ID does not exist" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.129905 4852 scope.go:117] "RemoveContainer" containerID="3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f" Dec 01 21:11:48 crc kubenswrapper[4852]: E1201 21:11:48.130258 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f\": container with ID starting with 3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f not found: ID does not exist" containerID="3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.130376 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f"} err="failed to get container status \"3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f\": rpc 
error: code = NotFound desc = could not find container \"3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f\": container with ID starting with 3320fa38cbf9887b28ab9f45d738f6a339c89ab61b6ffeb9b6aa6f65b47d8d3f not found: ID does not exist" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.130550 4852 scope.go:117] "RemoveContainer" containerID="818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3" Dec 01 21:11:48 crc kubenswrapper[4852]: E1201 21:11:48.133595 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3\": container with ID starting with 818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3 not found: ID does not exist" containerID="818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.133647 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3"} err="failed to get container status \"818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3\": rpc error: code = NotFound desc = could not find container \"818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3\": container with ID starting with 818d633f5e4d45c5889927ad3bc4d09cbc0d71fc84e70f9fd1aff0eb47dadba3 not found: ID does not exist" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.134259 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-catalog-content\") pod \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.134561 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddsdh\" (UniqueName: \"kubernetes.io/projected/5389dfb0-c97c-422a-9e16-65c1ebb8c508-kube-api-access-ddsdh\") pod \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.134717 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-utilities\") pod \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\" (UID: \"5389dfb0-c97c-422a-9e16-65c1ebb8c508\") " Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.135598 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-utilities" (OuterVolumeSpecName: "utilities") pod "5389dfb0-c97c-422a-9e16-65c1ebb8c508" (UID: "5389dfb0-c97c-422a-9e16-65c1ebb8c508"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.145702 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5389dfb0-c97c-422a-9e16-65c1ebb8c508-kube-api-access-ddsdh" (OuterVolumeSpecName: "kube-api-access-ddsdh") pod "5389dfb0-c97c-422a-9e16-65c1ebb8c508" (UID: "5389dfb0-c97c-422a-9e16-65c1ebb8c508"). InnerVolumeSpecName "kube-api-access-ddsdh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.176042 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5389dfb0-c97c-422a-9e16-65c1ebb8c508" (UID: "5389dfb0-c97c-422a-9e16-65c1ebb8c508"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.237471 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddsdh\" (UniqueName: \"kubernetes.io/projected/5389dfb0-c97c-422a-9e16-65c1ebb8c508-kube-api-access-ddsdh\") on node \"crc\" DevicePath \"\"" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.237504 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.237518 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5389dfb0-c97c-422a-9e16-65c1ebb8c508-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.366591 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cmvgg"] Dec 01 21:11:48 crc kubenswrapper[4852]: I1201 21:11:48.374199 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cmvgg"] Dec 01 21:11:50 crc kubenswrapper[4852]: I1201 21:11:50.337969 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" path="/var/lib/kubelet/pods/5389dfb0-c97c-422a-9e16-65c1ebb8c508/volumes" Dec 01 21:12:20 crc kubenswrapper[4852]: I1201 21:12:20.355460 4852 generic.go:334] "Generic (PLEG): container finished" podID="c0c066ed-6220-472e-a312-bba1b8439754" containerID="0380b97c5f8cc85996fce088ad9c2990f9e78f06923fd596778aeecd2893e209" exitCode=0 Dec 01 21:12:20 crc kubenswrapper[4852]: I1201 21:12:20.355504 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/crc-debug-sj6th" event={"ID":"c0c066ed-6220-472e-a312-bba1b8439754","Type":"ContainerDied","Data":"0380b97c5f8cc85996fce088ad9c2990f9e78f06923fd596778aeecd2893e209"} Dec 01 21:12:21 crc kubenswrapper[4852]: I1201 21:12:21.464610 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-sj6th" Dec 01 21:12:21 crc kubenswrapper[4852]: I1201 21:12:21.500946 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-85nxj/crc-debug-sj6th"] Dec 01 21:12:21 crc kubenswrapper[4852]: I1201 21:12:21.509805 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-85nxj/crc-debug-sj6th"] Dec 01 21:12:21 crc kubenswrapper[4852]: I1201 21:12:21.542089 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c0c066ed-6220-472e-a312-bba1b8439754-host\") pod \"c0c066ed-6220-472e-a312-bba1b8439754\" (UID: \"c0c066ed-6220-472e-a312-bba1b8439754\") " Dec 01 21:12:21 crc kubenswrapper[4852]: I1201 21:12:21.542158 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4x5bp\" (UniqueName: \"kubernetes.io/projected/c0c066ed-6220-472e-a312-bba1b8439754-kube-api-access-4x5bp\") pod \"c0c066ed-6220-472e-a312-bba1b8439754\" (UID: \"c0c066ed-6220-472e-a312-bba1b8439754\") " Dec 01 21:12:21 crc kubenswrapper[4852]: I1201 21:12:21.542207 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c0c066ed-6220-472e-a312-bba1b8439754-host" (OuterVolumeSpecName: "host") pod "c0c066ed-6220-472e-a312-bba1b8439754" (UID: "c0c066ed-6220-472e-a312-bba1b8439754"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 21:12:21 crc kubenswrapper[4852]: I1201 21:12:21.542528 4852 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c0c066ed-6220-472e-a312-bba1b8439754-host\") on node \"crc\" DevicePath \"\"" Dec 01 21:12:21 crc kubenswrapper[4852]: I1201 21:12:21.548868 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0c066ed-6220-472e-a312-bba1b8439754-kube-api-access-4x5bp" (OuterVolumeSpecName: "kube-api-access-4x5bp") pod "c0c066ed-6220-472e-a312-bba1b8439754" (UID: "c0c066ed-6220-472e-a312-bba1b8439754"). InnerVolumeSpecName "kube-api-access-4x5bp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:12:21 crc kubenswrapper[4852]: I1201 21:12:21.644380 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4x5bp\" (UniqueName: \"kubernetes.io/projected/c0c066ed-6220-472e-a312-bba1b8439754-kube-api-access-4x5bp\") on node \"crc\" DevicePath \"\"" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.338256 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0c066ed-6220-472e-a312-bba1b8439754" path="/var/lib/kubelet/pods/c0c066ed-6220-472e-a312-bba1b8439754/volumes" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.376370 4852 scope.go:117] "RemoveContainer" containerID="0380b97c5f8cc85996fce088ad9c2990f9e78f06923fd596778aeecd2893e209" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.376494 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-sj6th" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.704260 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-85nxj/crc-debug-jm44j"] Dec 01 21:12:22 crc kubenswrapper[4852]: E1201 21:12:22.704667 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerName="extract-content" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.704679 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerName="extract-content" Dec 01 21:12:22 crc kubenswrapper[4852]: E1201 21:12:22.704694 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerName="extract-utilities" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.704699 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerName="extract-utilities" Dec 01 21:12:22 crc kubenswrapper[4852]: E1201 21:12:22.704721 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0c066ed-6220-472e-a312-bba1b8439754" containerName="container-00" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.704727 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0c066ed-6220-472e-a312-bba1b8439754" containerName="container-00" Dec 01 21:12:22 crc kubenswrapper[4852]: E1201 21:12:22.704750 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerName="registry-server" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.704756 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerName="registry-server" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.704935 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0c066ed-6220-472e-a312-bba1b8439754" containerName="container-00" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.704965 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="5389dfb0-c97c-422a-9e16-65c1ebb8c508" containerName="registry-server" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.705553 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-jm44j" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.865144 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8xvg\" (UniqueName: \"kubernetes.io/projected/67ad9026-615e-42f4-a7da-d0091bb56805-kube-api-access-g8xvg\") pod \"crc-debug-jm44j\" (UID: \"67ad9026-615e-42f4-a7da-d0091bb56805\") " pod="openshift-must-gather-85nxj/crc-debug-jm44j" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.865211 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67ad9026-615e-42f4-a7da-d0091bb56805-host\") pod \"crc-debug-jm44j\" (UID: \"67ad9026-615e-42f4-a7da-d0091bb56805\") " pod="openshift-must-gather-85nxj/crc-debug-jm44j" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.966500 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8xvg\" (UniqueName: \"kubernetes.io/projected/67ad9026-615e-42f4-a7da-d0091bb56805-kube-api-access-g8xvg\") pod \"crc-debug-jm44j\" (UID: \"67ad9026-615e-42f4-a7da-d0091bb56805\") " pod="openshift-must-gather-85nxj/crc-debug-jm44j" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.966555 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67ad9026-615e-42f4-a7da-d0091bb56805-host\") pod \"crc-debug-jm44j\" (UID: \"67ad9026-615e-42f4-a7da-d0091bb56805\") " pod="openshift-must-gather-85nxj/crc-debug-jm44j" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.966740 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67ad9026-615e-42f4-a7da-d0091bb56805-host\") pod \"crc-debug-jm44j\" (UID: \"67ad9026-615e-42f4-a7da-d0091bb56805\") " pod="openshift-must-gather-85nxj/crc-debug-jm44j" Dec 01 21:12:22 crc kubenswrapper[4852]: I1201 21:12:22.984888 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8xvg\" (UniqueName: \"kubernetes.io/projected/67ad9026-615e-42f4-a7da-d0091bb56805-kube-api-access-g8xvg\") pod \"crc-debug-jm44j\" (UID: \"67ad9026-615e-42f4-a7da-d0091bb56805\") " pod="openshift-must-gather-85nxj/crc-debug-jm44j" Dec 01 21:12:23 crc kubenswrapper[4852]: I1201 21:12:23.028318 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-jm44j" Dec 01 21:12:23 crc kubenswrapper[4852]: I1201 21:12:23.386841 4852 generic.go:334] "Generic (PLEG): container finished" podID="67ad9026-615e-42f4-a7da-d0091bb56805" containerID="04967e514f8df3e7e99abd454101c64f6b9c056c933d36c304fc2f942b63a5ad" exitCode=0 Dec 01 21:12:23 crc kubenswrapper[4852]: I1201 21:12:23.387027 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/crc-debug-jm44j" event={"ID":"67ad9026-615e-42f4-a7da-d0091bb56805","Type":"ContainerDied","Data":"04967e514f8df3e7e99abd454101c64f6b9c056c933d36c304fc2f942b63a5ad"} Dec 01 21:12:23 crc kubenswrapper[4852]: I1201 21:12:23.387161 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/crc-debug-jm44j" event={"ID":"67ad9026-615e-42f4-a7da-d0091bb56805","Type":"ContainerStarted","Data":"b1296b5982f066d534f6b2f39914f5983f9a7534f487d51740aaa79d4266ed52"} Dec 01 21:12:23 crc kubenswrapper[4852]: I1201 21:12:23.862758 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-85nxj/crc-debug-jm44j"] Dec 01 21:12:23 crc kubenswrapper[4852]: I1201 21:12:23.872344 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-85nxj/crc-debug-jm44j"] Dec 01 21:12:24 crc kubenswrapper[4852]: I1201 21:12:24.781152 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-jm44j" Dec 01 21:12:24 crc kubenswrapper[4852]: I1201 21:12:24.901782 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67ad9026-615e-42f4-a7da-d0091bb56805-host\") pod \"67ad9026-615e-42f4-a7da-d0091bb56805\" (UID: \"67ad9026-615e-42f4-a7da-d0091bb56805\") " Dec 01 21:12:24 crc kubenswrapper[4852]: I1201 21:12:24.901885 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8xvg\" (UniqueName: \"kubernetes.io/projected/67ad9026-615e-42f4-a7da-d0091bb56805-kube-api-access-g8xvg\") pod \"67ad9026-615e-42f4-a7da-d0091bb56805\" (UID: \"67ad9026-615e-42f4-a7da-d0091bb56805\") " Dec 01 21:12:24 crc kubenswrapper[4852]: I1201 21:12:24.902214 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/67ad9026-615e-42f4-a7da-d0091bb56805-host" (OuterVolumeSpecName: "host") pod "67ad9026-615e-42f4-a7da-d0091bb56805" (UID: "67ad9026-615e-42f4-a7da-d0091bb56805"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 21:12:24 crc kubenswrapper[4852]: I1201 21:12:24.902711 4852 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67ad9026-615e-42f4-a7da-d0091bb56805-host\") on node \"crc\" DevicePath \"\"" Dec 01 21:12:24 crc kubenswrapper[4852]: I1201 21:12:24.908157 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67ad9026-615e-42f4-a7da-d0091bb56805-kube-api-access-g8xvg" (OuterVolumeSpecName: "kube-api-access-g8xvg") pod "67ad9026-615e-42f4-a7da-d0091bb56805" (UID: "67ad9026-615e-42f4-a7da-d0091bb56805"). InnerVolumeSpecName "kube-api-access-g8xvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.000951 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4hnt9"] Dec 01 21:12:25 crc kubenswrapper[4852]: E1201 21:12:25.001769 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67ad9026-615e-42f4-a7da-d0091bb56805" containerName="container-00" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.001792 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="67ad9026-615e-42f4-a7da-d0091bb56805" containerName="container-00" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.002063 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="67ad9026-615e-42f4-a7da-d0091bb56805" containerName="container-00" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.003908 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.004426 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8xvg\" (UniqueName: \"kubernetes.io/projected/67ad9026-615e-42f4-a7da-d0091bb56805-kube-api-access-g8xvg\") on node \"crc\" DevicePath \"\"" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.014677 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4hnt9"] Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.106060 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-utilities\") pod \"redhat-marketplace-4hnt9\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.106186 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c7qc\" (UniqueName: \"kubernetes.io/projected/f42ca025-92ab-44d1-8222-842a440c4a53-kube-api-access-4c7qc\") pod \"redhat-marketplace-4hnt9\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.106233 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-catalog-content\") pod \"redhat-marketplace-4hnt9\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.119825 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-85nxj/crc-debug-d8pdb"] Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.124352 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-d8pdb" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.208076 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c7qc\" (UniqueName: \"kubernetes.io/projected/f42ca025-92ab-44d1-8222-842a440c4a53-kube-api-access-4c7qc\") pod \"redhat-marketplace-4hnt9\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.208191 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-catalog-content\") pod \"redhat-marketplace-4hnt9\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.208335 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c18d59ad-d265-4cad-9021-c6ce15ab216f-host\") pod \"crc-debug-d8pdb\" (UID: \"c18d59ad-d265-4cad-9021-c6ce15ab216f\") " pod="openshift-must-gather-85nxj/crc-debug-d8pdb" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.208392 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-utilities\") pod \"redhat-marketplace-4hnt9\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.208586 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l22tm\" (UniqueName: \"kubernetes.io/projected/c18d59ad-d265-4cad-9021-c6ce15ab216f-kube-api-access-l22tm\") pod \"crc-debug-d8pdb\" (UID: \"c18d59ad-d265-4cad-9021-c6ce15ab216f\") " pod="openshift-must-gather-85nxj/crc-debug-d8pdb" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.208763 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-catalog-content\") pod \"redhat-marketplace-4hnt9\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.208881 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-utilities\") pod \"redhat-marketplace-4hnt9\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.240252 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c7qc\" (UniqueName: \"kubernetes.io/projected/f42ca025-92ab-44d1-8222-842a440c4a53-kube-api-access-4c7qc\") pod \"redhat-marketplace-4hnt9\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.309913 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l22tm\" (UniqueName: \"kubernetes.io/projected/c18d59ad-d265-4cad-9021-c6ce15ab216f-kube-api-access-l22tm\") pod \"crc-debug-d8pdb\" (UID: 
\"c18d59ad-d265-4cad-9021-c6ce15ab216f\") " pod="openshift-must-gather-85nxj/crc-debug-d8pdb" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.310072 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c18d59ad-d265-4cad-9021-c6ce15ab216f-host\") pod \"crc-debug-d8pdb\" (UID: \"c18d59ad-d265-4cad-9021-c6ce15ab216f\") " pod="openshift-must-gather-85nxj/crc-debug-d8pdb" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.310181 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c18d59ad-d265-4cad-9021-c6ce15ab216f-host\") pod \"crc-debug-d8pdb\" (UID: \"c18d59ad-d265-4cad-9021-c6ce15ab216f\") " pod="openshift-must-gather-85nxj/crc-debug-d8pdb" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.322739 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.329011 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l22tm\" (UniqueName: \"kubernetes.io/projected/c18d59ad-d265-4cad-9021-c6ce15ab216f-kube-api-access-l22tm\") pod \"crc-debug-d8pdb\" (UID: \"c18d59ad-d265-4cad-9021-c6ce15ab216f\") " pod="openshift-must-gather-85nxj/crc-debug-d8pdb" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.424162 4852 scope.go:117] "RemoveContainer" containerID="04967e514f8df3e7e99abd454101c64f6b9c056c933d36c304fc2f942b63a5ad" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.424215 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-jm44j" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.439613 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-d8pdb" Dec 01 21:12:25 crc kubenswrapper[4852]: I1201 21:12:25.860433 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4hnt9"] Dec 01 21:12:26 crc kubenswrapper[4852]: I1201 21:12:26.343663 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67ad9026-615e-42f4-a7da-d0091bb56805" path="/var/lib/kubelet/pods/67ad9026-615e-42f4-a7da-d0091bb56805/volumes" Dec 01 21:12:26 crc kubenswrapper[4852]: I1201 21:12:26.434693 4852 generic.go:334] "Generic (PLEG): container finished" podID="f42ca025-92ab-44d1-8222-842a440c4a53" containerID="dcc333dc142b71c92454b081f4018f206b27fa94df84ba60f8f1639b66fe8873" exitCode=0 Dec 01 21:12:26 crc kubenswrapper[4852]: I1201 21:12:26.434754 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4hnt9" event={"ID":"f42ca025-92ab-44d1-8222-842a440c4a53","Type":"ContainerDied","Data":"dcc333dc142b71c92454b081f4018f206b27fa94df84ba60f8f1639b66fe8873"} Dec 01 21:12:26 crc kubenswrapper[4852]: I1201 21:12:26.434825 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4hnt9" event={"ID":"f42ca025-92ab-44d1-8222-842a440c4a53","Type":"ContainerStarted","Data":"e2e79c05caa10e704a11abdb90c9b626ee4f3125a1801457d279e6cbe2a4208b"} Dec 01 21:12:26 crc kubenswrapper[4852]: I1201 21:12:26.437320 4852 generic.go:334] "Generic (PLEG): container finished" podID="c18d59ad-d265-4cad-9021-c6ce15ab216f" containerID="096c1a0a846c22507b92875ea7baffb54f64e3ad8d622b4a860b9de116aa7897" exitCode=0 Dec 01 21:12:26 crc kubenswrapper[4852]: I1201 21:12:26.437385 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/crc-debug-d8pdb" event={"ID":"c18d59ad-d265-4cad-9021-c6ce15ab216f","Type":"ContainerDied","Data":"096c1a0a846c22507b92875ea7baffb54f64e3ad8d622b4a860b9de116aa7897"} Dec 01 21:12:26 crc kubenswrapper[4852]: I1201 21:12:26.437410 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/crc-debug-d8pdb" event={"ID":"c18d59ad-d265-4cad-9021-c6ce15ab216f","Type":"ContainerStarted","Data":"d30bf3cc89cf8ade8cafa662e163cbae5c6d0c1bcba00216378c1c65d1dd5aaf"} Dec 01 21:12:26 crc kubenswrapper[4852]: I1201 21:12:26.492113 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-85nxj/crc-debug-d8pdb"] Dec 01 21:12:26 crc kubenswrapper[4852]: I1201 21:12:26.503329 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-85nxj/crc-debug-d8pdb"] Dec 01 21:12:27 crc kubenswrapper[4852]: I1201 21:12:27.453710 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4hnt9" event={"ID":"f42ca025-92ab-44d1-8222-842a440c4a53","Type":"ContainerStarted","Data":"e5339f27ca2994002cb46cd55fe59c0566da8f82801fae66521586d47388dd48"} Dec 01 21:12:27 crc kubenswrapper[4852]: I1201 21:12:27.551303 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-d8pdb" Dec 01 21:12:27 crc kubenswrapper[4852]: I1201 21:12:27.664515 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c18d59ad-d265-4cad-9021-c6ce15ab216f-host\") pod \"c18d59ad-d265-4cad-9021-c6ce15ab216f\" (UID: \"c18d59ad-d265-4cad-9021-c6ce15ab216f\") " Dec 01 21:12:27 crc kubenswrapper[4852]: I1201 21:12:27.664605 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c18d59ad-d265-4cad-9021-c6ce15ab216f-host" (OuterVolumeSpecName: "host") pod "c18d59ad-d265-4cad-9021-c6ce15ab216f" (UID: "c18d59ad-d265-4cad-9021-c6ce15ab216f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 21:12:27 crc kubenswrapper[4852]: I1201 21:12:27.664755 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l22tm\" (UniqueName: \"kubernetes.io/projected/c18d59ad-d265-4cad-9021-c6ce15ab216f-kube-api-access-l22tm\") pod \"c18d59ad-d265-4cad-9021-c6ce15ab216f\" (UID: \"c18d59ad-d265-4cad-9021-c6ce15ab216f\") " Dec 01 21:12:27 crc kubenswrapper[4852]: I1201 21:12:27.665231 4852 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c18d59ad-d265-4cad-9021-c6ce15ab216f-host\") on node \"crc\" DevicePath \"\"" Dec 01 21:12:27 crc kubenswrapper[4852]: I1201 21:12:27.673399 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c18d59ad-d265-4cad-9021-c6ce15ab216f-kube-api-access-l22tm" (OuterVolumeSpecName: "kube-api-access-l22tm") pod "c18d59ad-d265-4cad-9021-c6ce15ab216f" (UID: "c18d59ad-d265-4cad-9021-c6ce15ab216f"). InnerVolumeSpecName "kube-api-access-l22tm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:12:27 crc kubenswrapper[4852]: I1201 21:12:27.767143 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l22tm\" (UniqueName: \"kubernetes.io/projected/c18d59ad-d265-4cad-9021-c6ce15ab216f-kube-api-access-l22tm\") on node \"crc\" DevicePath \"\"" Dec 01 21:12:28 crc kubenswrapper[4852]: I1201 21:12:28.329468 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c18d59ad-d265-4cad-9021-c6ce15ab216f" path="/var/lib/kubelet/pods/c18d59ad-d265-4cad-9021-c6ce15ab216f/volumes" Dec 01 21:12:28 crc kubenswrapper[4852]: I1201 21:12:28.464502 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85nxj/crc-debug-d8pdb" Dec 01 21:12:28 crc kubenswrapper[4852]: I1201 21:12:28.465435 4852 scope.go:117] "RemoveContainer" containerID="096c1a0a846c22507b92875ea7baffb54f64e3ad8d622b4a860b9de116aa7897" Dec 01 21:12:28 crc kubenswrapper[4852]: I1201 21:12:28.466620 4852 generic.go:334] "Generic (PLEG): container finished" podID="f42ca025-92ab-44d1-8222-842a440c4a53" containerID="e5339f27ca2994002cb46cd55fe59c0566da8f82801fae66521586d47388dd48" exitCode=0 Dec 01 21:12:28 crc kubenswrapper[4852]: I1201 21:12:28.466655 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4hnt9" event={"ID":"f42ca025-92ab-44d1-8222-842a440c4a53","Type":"ContainerDied","Data":"e5339f27ca2994002cb46cd55fe59c0566da8f82801fae66521586d47388dd48"} Dec 01 21:12:29 crc kubenswrapper[4852]: I1201 21:12:29.486443 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4hnt9" event={"ID":"f42ca025-92ab-44d1-8222-842a440c4a53","Type":"ContainerStarted","Data":"a504f987f5d7d0e6fabcdc40b3d6b6e258f96a0132f647f21902ccf9d2733302"} Dec 01 21:12:35 crc kubenswrapper[4852]: I1201 21:12:35.323184 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:35 crc kubenswrapper[4852]: I1201 21:12:35.323715 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:35 crc kubenswrapper[4852]: I1201 21:12:35.379220 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:35 crc kubenswrapper[4852]: I1201 21:12:35.401350 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4hnt9" podStartSLOduration=8.803773141 podStartE2EDuration="11.401325705s" podCreationTimestamp="2025-12-01 21:12:24 +0000 UTC" firstStartedPulling="2025-12-01 21:12:26.436812443 +0000 UTC m=+4066.363893850" lastFinishedPulling="2025-12-01 21:12:29.034364997 +0000 UTC m=+4068.961446414" observedRunningTime="2025-12-01 21:12:29.50604067 +0000 UTC m=+4069.433122127" watchObservedRunningTime="2025-12-01 21:12:35.401325705 +0000 UTC m=+4075.328407122" Dec 01 21:12:35 crc kubenswrapper[4852]: I1201 21:12:35.591666 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:35 crc kubenswrapper[4852]: I1201 21:12:35.663280 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4hnt9"] Dec 01 21:12:37 crc kubenswrapper[4852]: I1201 21:12:37.564398 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4hnt9" podUID="f42ca025-92ab-44d1-8222-842a440c4a53" containerName="registry-server" containerID="cri-o://a504f987f5d7d0e6fabcdc40b3d6b6e258f96a0132f647f21902ccf9d2733302" gracePeriod=2 Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.580833 4852 generic.go:334] "Generic (PLEG): container finished" podID="f42ca025-92ab-44d1-8222-842a440c4a53" containerID="a504f987f5d7d0e6fabcdc40b3d6b6e258f96a0132f647f21902ccf9d2733302" exitCode=0 Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.580912 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4hnt9" 
event={"ID":"f42ca025-92ab-44d1-8222-842a440c4a53","Type":"ContainerDied","Data":"a504f987f5d7d0e6fabcdc40b3d6b6e258f96a0132f647f21902ccf9d2733302"} Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.581417 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4hnt9" event={"ID":"f42ca025-92ab-44d1-8222-842a440c4a53","Type":"ContainerDied","Data":"e2e79c05caa10e704a11abdb90c9b626ee4f3125a1801457d279e6cbe2a4208b"} Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.581439 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2e79c05caa10e704a11abdb90c9b626ee4f3125a1801457d279e6cbe2a4208b" Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.653287 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.808914 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-utilities\") pod \"f42ca025-92ab-44d1-8222-842a440c4a53\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.808978 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-catalog-content\") pod \"f42ca025-92ab-44d1-8222-842a440c4a53\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.809060 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4c7qc\" (UniqueName: \"kubernetes.io/projected/f42ca025-92ab-44d1-8222-842a440c4a53-kube-api-access-4c7qc\") pod \"f42ca025-92ab-44d1-8222-842a440c4a53\" (UID: \"f42ca025-92ab-44d1-8222-842a440c4a53\") " Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.810018 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-utilities" (OuterVolumeSpecName: "utilities") pod "f42ca025-92ab-44d1-8222-842a440c4a53" (UID: "f42ca025-92ab-44d1-8222-842a440c4a53"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.815284 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f42ca025-92ab-44d1-8222-842a440c4a53-kube-api-access-4c7qc" (OuterVolumeSpecName: "kube-api-access-4c7qc") pod "f42ca025-92ab-44d1-8222-842a440c4a53" (UID: "f42ca025-92ab-44d1-8222-842a440c4a53"). InnerVolumeSpecName "kube-api-access-4c7qc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.835363 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f42ca025-92ab-44d1-8222-842a440c4a53" (UID: "f42ca025-92ab-44d1-8222-842a440c4a53"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.911688 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4c7qc\" (UniqueName: \"kubernetes.io/projected/f42ca025-92ab-44d1-8222-842a440c4a53-kube-api-access-4c7qc\") on node \"crc\" DevicePath \"\"" Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.911725 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 21:12:38 crc kubenswrapper[4852]: I1201 21:12:38.911738 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f42ca025-92ab-44d1-8222-842a440c4a53-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 21:12:39 crc kubenswrapper[4852]: I1201 21:12:39.589838 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4hnt9" Dec 01 21:12:39 crc kubenswrapper[4852]: I1201 21:12:39.638869 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4hnt9"] Dec 01 21:12:39 crc kubenswrapper[4852]: I1201 21:12:39.650665 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4hnt9"] Dec 01 21:12:40 crc kubenswrapper[4852]: I1201 21:12:40.336842 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f42ca025-92ab-44d1-8222-842a440c4a53" path="/var/lib/kubelet/pods/f42ca025-92ab-44d1-8222-842a440c4a53/volumes" Dec 01 21:12:57 crc kubenswrapper[4852]: I1201 21:12:57.326421 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6b6b9785cb-jncbj_66e282d8-f6fc-4c9b-84e2-398efd252579/barbican-api/0.log" Dec 01 21:12:57 crc kubenswrapper[4852]: I1201 21:12:57.632070 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6b6b9785cb-jncbj_66e282d8-f6fc-4c9b-84e2-398efd252579/barbican-api-log/0.log" Dec 01 21:12:57 crc kubenswrapper[4852]: I1201 21:12:57.692895 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6ffc9cc59d-mbzlh_d6030470-6a0e-43fc-ae0c-755a3d4a9980/barbican-keystone-listener/0.log" Dec 01 21:12:57 crc kubenswrapper[4852]: I1201 21:12:57.728704 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6ffc9cc59d-mbzlh_d6030470-6a0e-43fc-ae0c-755a3d4a9980/barbican-keystone-listener-log/0.log" Dec 01 21:12:57 crc kubenswrapper[4852]: I1201 21:12:57.855671 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5dcb96d78f-vvtc5_6ec762a2-a422-40fd-91a8-fdaf58be343c/barbican-worker/0.log" Dec 01 21:12:57 crc kubenswrapper[4852]: I1201 21:12:57.930742 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5dcb96d78f-vvtc5_6ec762a2-a422-40fd-91a8-fdaf58be343c/barbican-worker-log/0.log" Dec 01 21:12:58 crc kubenswrapper[4852]: I1201 21:12:58.073258 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-6lkvm_03d266a8-6787-4bc8-8836-d11fb0d078b4/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:12:58 crc kubenswrapper[4852]: I1201 21:12:58.188065 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_113952dd-818a-4d1d-a610-789c5cec4238/ceilometer-central-agent/0.log" Dec 01 21:12:58 crc kubenswrapper[4852]: I1201 21:12:58.214620 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_113952dd-818a-4d1d-a610-789c5cec4238/ceilometer-notification-agent/0.log" Dec 01 21:12:58 crc kubenswrapper[4852]: I1201 21:12:58.289633 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_113952dd-818a-4d1d-a610-789c5cec4238/proxy-httpd/0.log" Dec 01 21:12:58 crc kubenswrapper[4852]: I1201 21:12:58.382290 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_113952dd-818a-4d1d-a610-789c5cec4238/sg-core/0.log" Dec 01 21:12:58 crc kubenswrapper[4852]: I1201 21:12:58.483101 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f20e47a0-b3f2-48e5-baae-1e75e24377ac/cinder-api/0.log" Dec 01 21:12:58 crc kubenswrapper[4852]: I1201 21:12:58.527108 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f20e47a0-b3f2-48e5-baae-1e75e24377ac/cinder-api-log/0.log" Dec 01 21:12:58 crc kubenswrapper[4852]: I1201 21:12:58.683422 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a/cinder-scheduler/0.log" Dec 01 21:12:58 crc kubenswrapper[4852]: I1201 21:12:58.737842 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9ace44ea-d6ec-45a2-b3d4-a9c2e8b3998a/probe/0.log" Dec 01 21:12:58 crc kubenswrapper[4852]: I1201 21:12:58.842263 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-5hh5c_d7fb0098-9a59-4686-a483-8a1361628214/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:12:59 crc kubenswrapper[4852]: I1201 21:12:59.380906 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59dddb89-qnwhw_e04bb162-f05c-4844-b368-70764dce284d/init/0.log" Dec 01 21:12:59 crc kubenswrapper[4852]: I1201 21:12:59.401799 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-cvhrr_5839b9be-5c81-47e2-b392-bf8652b0403e/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:12:59 crc kubenswrapper[4852]: I1201 21:12:59.531587 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59dddb89-qnwhw_e04bb162-f05c-4844-b368-70764dce284d/init/0.log" Dec 01 21:12:59 crc kubenswrapper[4852]: I1201 21:12:59.584905 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59dddb89-qnwhw_e04bb162-f05c-4844-b368-70764dce284d/dnsmasq-dns/0.log" Dec 01 21:12:59 crc kubenswrapper[4852]: I1201 21:12:59.617371 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-q7pls_f399c1ee-c0af-4085-953e-6333beb90786/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:12:59 crc kubenswrapper[4852]: I1201 21:12:59.839997 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_02fed1df-7a8d-41ed-8662-17ecda728c06/glance-httpd/0.log" Dec 01 21:12:59 crc kubenswrapper[4852]: I1201 21:12:59.881218 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_02fed1df-7a8d-41ed-8662-17ecda728c06/glance-log/0.log" Dec 01 
21:13:00 crc kubenswrapper[4852]: I1201 21:13:00.013978 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_c6cbf2dd-2b08-4fa7-9530-e5835103a6d3/glance-httpd/0.log" Dec 01 21:13:00 crc kubenswrapper[4852]: I1201 21:13:00.058668 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_c6cbf2dd-2b08-4fa7-9530-e5835103a6d3/glance-log/0.log" Dec 01 21:13:00 crc kubenswrapper[4852]: I1201 21:13:00.241768 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-dfd6f888-xxwbg_160a77b2-5ec6-4223-b939-8e90b339f530/horizon/0.log" Dec 01 21:13:00 crc kubenswrapper[4852]: I1201 21:13:00.377772 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-s57rw_a0df6c51-df24-475e-b857-39aafce2f093/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:00 crc kubenswrapper[4852]: I1201 21:13:00.553300 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-dfd6f888-xxwbg_160a77b2-5ec6-4223-b939-8e90b339f530/horizon-log/0.log" Dec 01 21:13:00 crc kubenswrapper[4852]: I1201 21:13:00.708789 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-pn2q6_4055e3d3-767a-4a20-95e4-dda1685cbe61/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:01 crc kubenswrapper[4852]: I1201 21:13:01.288928 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29410381-ttp6l_5b1bf086-0cbc-468b-baa4-9c7f6bf1e642/keystone-cron/0.log" Dec 01 21:13:01 crc kubenswrapper[4852]: I1201 21:13:01.421434 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-574c7f8dfc-6k2xn_d5fbd8b0-59fb-402e-8442-0302ea125e49/keystone-api/0.log" Dec 01 21:13:01 crc kubenswrapper[4852]: I1201 21:13:01.530907 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_2896f52e-0a75-4d18-b72b-66b173aaa3b2/kube-state-metrics/0.log" Dec 01 21:13:01 crc kubenswrapper[4852]: I1201 21:13:01.571144 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-8m5nv_6e87649b-f17b-4067-9803-f1cd06147f82/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:01 crc kubenswrapper[4852]: I1201 21:13:01.857678 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-55686cd79f-5wjtq_fc88aba6-cf67-4609-9a04-797090fcce15/neutron-httpd/0.log" Dec 01 21:13:01 crc kubenswrapper[4852]: I1201 21:13:01.874967 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-55686cd79f-5wjtq_fc88aba6-cf67-4609-9a04-797090fcce15/neutron-api/0.log" Dec 01 21:13:01 crc kubenswrapper[4852]: I1201 21:13:01.937212 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-dv9qr_4f398cd5-1aca-4178-9fcd-50a3bb15bfec/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:02 crc kubenswrapper[4852]: I1201 21:13:02.381552 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_623fc41b-9221-407d-a5d7-e59ce151725a/nova-api-log/0.log" Dec 01 21:13:02 crc kubenswrapper[4852]: I1201 21:13:02.542903 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_30f58a14-9ee3-44ea-9737-f14510a50b29/nova-cell0-conductor-conductor/0.log" Dec 01 
21:13:02 crc kubenswrapper[4852]: I1201 21:13:02.836635 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_9a9f4eb0-aae2-4c56-aab8-48a3c6b9fe04/nova-cell1-conductor-conductor/0.log" Dec 01 21:13:02 crc kubenswrapper[4852]: I1201 21:13:02.907539 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_fb428793-fd4f-4e29-a7e1-7c5b539d01d7/nova-cell1-novncproxy-novncproxy/0.log" Dec 01 21:13:02 crc kubenswrapper[4852]: I1201 21:13:02.938848 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_623fc41b-9221-407d-a5d7-e59ce151725a/nova-api-api/0.log" Dec 01 21:13:03 crc kubenswrapper[4852]: I1201 21:13:03.023689 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-9669b_aaf7fe95-5448-404e-b2f4-7bac25b267db/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:03 crc kubenswrapper[4852]: I1201 21:13:03.248050 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b254248d-f4d2-454d-bc92-09e0d709a0b8/nova-metadata-log/0.log" Dec 01 21:13:03 crc kubenswrapper[4852]: I1201 21:13:03.512077 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_f04ccdb1-afe8-4f1c-b475-b10384993bdc/nova-scheduler-scheduler/0.log" Dec 01 21:13:03 crc kubenswrapper[4852]: I1201 21:13:03.516821 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_775ec07e-8dd8-47f7-94f1-4c5355335a82/mysql-bootstrap/0.log" Dec 01 21:13:03 crc kubenswrapper[4852]: I1201 21:13:03.696802 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_775ec07e-8dd8-47f7-94f1-4c5355335a82/mysql-bootstrap/0.log" Dec 01 21:13:03 crc kubenswrapper[4852]: I1201 21:13:03.736503 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_775ec07e-8dd8-47f7-94f1-4c5355335a82/galera/0.log" Dec 01 21:13:03 crc kubenswrapper[4852]: I1201 21:13:03.899274 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5a512edf-0808-47a9-91dd-81da3cf1cda9/mysql-bootstrap/0.log" Dec 01 21:13:04 crc kubenswrapper[4852]: I1201 21:13:04.144041 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5a512edf-0808-47a9-91dd-81da3cf1cda9/galera/0.log" Dec 01 21:13:04 crc kubenswrapper[4852]: I1201 21:13:04.153594 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5a512edf-0808-47a9-91dd-81da3cf1cda9/mysql-bootstrap/0.log" Dec 01 21:13:04 crc kubenswrapper[4852]: I1201 21:13:04.368254 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_49382464-d20a-4ec7-9096-5679b0fc12b7/openstackclient/0.log" Dec 01 21:13:04 crc kubenswrapper[4852]: I1201 21:13:04.380711 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-8kwmk_5ef29299-3043-4921-b77b-07416d89ed96/ovn-controller/0.log" Dec 01 21:13:04 crc kubenswrapper[4852]: I1201 21:13:04.562652 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b254248d-f4d2-454d-bc92-09e0d709a0b8/nova-metadata-metadata/0.log" Dec 01 21:13:04 crc kubenswrapper[4852]: I1201 21:13:04.580885 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-metrics-86jpt_b30528cc-b404-4564-bcac-da1fdc60ae52/openstack-network-exporter/0.log" Dec 01 21:13:04 crc kubenswrapper[4852]: I1201 21:13:04.775080 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rwcdk_d0b85f35-5e0a-45ed-a162-02c81ffbdedb/ovsdb-server-init/0.log" Dec 01 21:13:04 crc kubenswrapper[4852]: I1201 21:13:04.921985 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rwcdk_d0b85f35-5e0a-45ed-a162-02c81ffbdedb/ovsdb-server-init/0.log" Dec 01 21:13:04 crc kubenswrapper[4852]: I1201 21:13:04.974870 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rwcdk_d0b85f35-5e0a-45ed-a162-02c81ffbdedb/ovsdb-server/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.015210 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rwcdk_d0b85f35-5e0a-45ed-a162-02c81ffbdedb/ovs-vswitchd/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.156133 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-sphgd_9f4a4b48-5ead-42a3-9438-ec9103db3b39/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.235141 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7170ebb9-5806-4a03-8316-8c396a916197/openstack-network-exporter/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.285762 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7170ebb9-5806-4a03-8316-8c396a916197/ovn-northd/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.482392 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3e569dc0-0de8-47cf-a1d3-1e649efde4af/openstack-network-exporter/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.488686 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3e569dc0-0de8-47cf-a1d3-1e649efde4af/ovsdbserver-nb/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.622722 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0ff6ee0b-6797-494c-8166-88c5cc7cf3fe/openstack-network-exporter/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.695355 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0ff6ee0b-6797-494c-8166-88c5cc7cf3fe/ovsdbserver-sb/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.788789 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-fd7b4cb9d-8zvhn_e12728e7-2002-493c-ad13-3bbb68e8ecf7/placement-api/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.895698 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_5eb0a95a-7ba8-48aa-80bc-245c195063b0/setup-container/0.log" Dec 01 21:13:05 crc kubenswrapper[4852]: I1201 21:13:05.931764 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-fd7b4cb9d-8zvhn_e12728e7-2002-493c-ad13-3bbb68e8ecf7/placement-log/0.log" Dec 01 21:13:06 crc kubenswrapper[4852]: I1201 21:13:06.075780 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_5eb0a95a-7ba8-48aa-80bc-245c195063b0/setup-container/0.log" Dec 01 21:13:06 crc kubenswrapper[4852]: I1201 21:13:06.103097 4852 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_5eb0a95a-7ba8-48aa-80bc-245c195063b0/rabbitmq/0.log" Dec 01 21:13:06 crc kubenswrapper[4852]: I1201 21:13:06.140767 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9b1036e7-b15b-4b19-bac9-4ce322698550/setup-container/0.log" Dec 01 21:13:06 crc kubenswrapper[4852]: I1201 21:13:06.303555 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9b1036e7-b15b-4b19-bac9-4ce322698550/setup-container/0.log" Dec 01 21:13:06 crc kubenswrapper[4852]: I1201 21:13:06.337745 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9b1036e7-b15b-4b19-bac9-4ce322698550/rabbitmq/0.log" Dec 01 21:13:06 crc kubenswrapper[4852]: I1201 21:13:06.461310 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-c6ftq_23761811-cb87-42a7-b8a4-1ababc02ac47/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:06 crc kubenswrapper[4852]: I1201 21:13:06.524369 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-26vmn_79e71e37-cc0d-42e9-89dd-9cb4722aa53a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:06 crc kubenswrapper[4852]: I1201 21:13:06.709161 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-hcpn6_3edb936c-fcd0-4599-9c43-6ed0a4b957c4/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:06 crc kubenswrapper[4852]: I1201 21:13:06.720178 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-fwc7m_68042211-6c10-446e-bf41-ebfeff2a87ef/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:06 crc kubenswrapper[4852]: I1201 21:13:06.945372 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-w5hlg_78d28364-d4ba-45c7-be9a-d3a138e64800/ssh-known-hosts-edpm-deployment/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.139825 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-cd9d56787-qlkbk_aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c/proxy-server/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.175114 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-cd9d56787-qlkbk_aa8b4cc7-fcf4-4d04-9a18-81bcf34baf7c/proxy-httpd/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.242353 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-v8pzw_d31bdc12-ed48-45e2-b990-2b098be82119/swift-ring-rebalance/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.381435 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/account-auditor/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.389079 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/account-reaper/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.473631 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/account-replicator/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.554803 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/account-server/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.585117 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/container-auditor/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.621973 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/container-replicator/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.637549 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/container-server/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.781268 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/container-updater/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.789564 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/object-expirer/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.830358 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/object-auditor/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.884074 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/object-replicator/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.984750 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/object-server/0.log" Dec 01 21:13:07 crc kubenswrapper[4852]: I1201 21:13:07.988558 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/object-updater/0.log" Dec 01 21:13:08 crc kubenswrapper[4852]: I1201 21:13:08.060765 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/rsync/0.log" Dec 01 21:13:08 crc kubenswrapper[4852]: I1201 21:13:08.079319 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0eec4983-f32d-4858-a382-eacc49d726fd/swift-recon-cron/0.log" Dec 01 21:13:08 crc kubenswrapper[4852]: I1201 21:13:08.237007 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-8xsfx_59dae619-1f63-4b50-84ed-037a15a55876/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:08 crc kubenswrapper[4852]: I1201 21:13:08.293505 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_db1dc4fa-69a3-4c29-b69b-f6080f275e97/tempest-tests-tempest-tests-runner/0.log" Dec 01 21:13:08 crc kubenswrapper[4852]: I1201 21:13:08.464208 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_b347c57e-1cf3-41f2-9b2a-3ca688e412f4/test-operator-logs-container/0.log" Dec 01 21:13:08 crc kubenswrapper[4852]: I1201 21:13:08.579095 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-tjpbm_991ddb8f-bb11-4661-9604-2663fc221fc8/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 21:13:19 crc 
kubenswrapper[4852]: I1201 21:13:19.623013 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_5724232f-c6e6-4356-b4b2-a622191bedaf/memcached/0.log" Dec 01 21:13:20 crc kubenswrapper[4852]: I1201 21:13:20.230208 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:13:20 crc kubenswrapper[4852]: I1201 21:13:20.230286 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:13:38 crc kubenswrapper[4852]: I1201 21:13:38.944047 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/util/0.log" Dec 01 21:13:39 crc kubenswrapper[4852]: I1201 21:13:39.089953 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/pull/0.log" Dec 01 21:13:39 crc kubenswrapper[4852]: I1201 21:13:39.096436 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/pull/0.log" Dec 01 21:13:39 crc kubenswrapper[4852]: I1201 21:13:39.098979 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/util/0.log" Dec 01 21:13:39 crc kubenswrapper[4852]: I1201 21:13:39.475608 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/util/0.log" Dec 01 21:13:39 crc kubenswrapper[4852]: I1201 21:13:39.491517 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/extract/0.log" Dec 01 21:13:39 crc kubenswrapper[4852]: I1201 21:13:39.538666 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bj7cbw_894e8bba-50f5-4183-87b8-171444b663d3/pull/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.017200 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-bd4jr_7d884d8c-acfc-47fe-bee2-f0248f8b0eea/kube-rbac-proxy/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.032845 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-bd4jr_7d884d8c-acfc-47fe-bee2-f0248f8b0eea/manager/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.073004 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-2khxz_152c7905-249d-4195-afe6-7b02b5d8267d/kube-rbac-proxy/0.log" Dec 01 21:13:40 crc 
kubenswrapper[4852]: I1201 21:13:40.256978 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-2khxz_152c7905-249d-4195-afe6-7b02b5d8267d/manager/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.296075 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-gtsjg_714caded-89c7-44a3-a832-2fbaa0e00ac2/kube-rbac-proxy/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.348081 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-gtsjg_714caded-89c7-44a3-a832-2fbaa0e00ac2/manager/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.508832 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-ccgjt_efb3ff96-731f-4a01-8bed-636717f36fb4/kube-rbac-proxy/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.597736 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-ccgjt_efb3ff96-731f-4a01-8bed-636717f36fb4/manager/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.601870 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-z7gzm_b9f7343b-9bba-43e0-bb25-80a5f3fb139f/kube-rbac-proxy/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.682507 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-z7gzm_b9f7343b-9bba-43e0-bb25-80a5f3fb139f/manager/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.764151 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q7fhl_fa3d71fd-60b1-488c-9ae0-abb212b8d0a4/kube-rbac-proxy/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.774610 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q7fhl_fa3d71fd-60b1-488c-9ae0-abb212b8d0a4/manager/0.log" Dec 01 21:13:40 crc kubenswrapper[4852]: I1201 21:13:40.895263 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-7w8xr_d2869063-cc44-4cd4-b1f6-5b33a5250e77/kube-rbac-proxy/0.log" Dec 01 21:13:41 crc kubenswrapper[4852]: I1201 21:13:41.112058 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-7w8xr_d2869063-cc44-4cd4-b1f6-5b33a5250e77/manager/0.log" Dec 01 21:13:41 crc kubenswrapper[4852]: I1201 21:13:41.567255 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-4rphb_b7818ab0-4a52-48fe-a0c8-88d162745762/manager/0.log" Dec 01 21:13:41 crc kubenswrapper[4852]: I1201 21:13:41.587728 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-nh9t8_3aa88cab-a21d-40d4-b278-8c006ce138ff/kube-rbac-proxy/0.log" Dec 01 21:13:41 crc kubenswrapper[4852]: I1201 21:13:41.607324 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-4rphb_b7818ab0-4a52-48fe-a0c8-88d162745762/kube-rbac-proxy/0.log" 
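The log.go:25 "Finished parsing log file" entries that dominate this stretch record the kubelet reading container log files back from /var/log/pods, which typically happens when something requests pod logs through the kubelet API. The bulk sweep here, touching every pod on the node, is consistent with the must-gather run that finishes at 21:16:52 further down, though this log does not identify the caller. Below is a minimal client-go sketch of the kind of request that drives such reads; the kubeconfig handling is generic, and the namespace, pod, and container names are placeholders copied from the entries above, not an assertion about what actually issued the requests.

    // Sketch: stream one container's logs via the API server, which in turn
    // makes the kubelet open and parse the file under /var/log/pods.
    // Assumes KUBECONFIG points at a reachable cluster.
    package main

    import (
        "context"
        "io"
        "os"

        corev1 "k8s.io/api/core/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // Placeholder target taken from the parse entries in this log.
        req := cs.CoreV1().Pods("openstack-operators").GetLogs(
            "barbican-operator-controller-manager-7d9dfd778-bd4jr",
            &corev1.PodLogOptions{Container: "manager"})
        rc, err := req.Stream(context.Background())
        if err != nil {
            panic(err)
        }
        defer rc.Close()
        io.Copy(os.Stdout, rc) // reads like this are what the parse entries reflect
    }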
Dec 01 21:13:41 crc kubenswrapper[4852]: I1201 21:13:41.782290 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-lz6m7_4908b6e5-acd8-4754-877f-18a3b8897aa5/kube-rbac-proxy/0.log" Dec 01 21:13:41 crc kubenswrapper[4852]: I1201 21:13:41.805616 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-nh9t8_3aa88cab-a21d-40d4-b278-8c006ce138ff/manager/0.log" Dec 01 21:13:41 crc kubenswrapper[4852]: I1201 21:13:41.823252 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-lz6m7_4908b6e5-acd8-4754-877f-18a3b8897aa5/manager/0.log" Dec 01 21:13:41 crc kubenswrapper[4852]: I1201 21:13:41.977542 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-rpswz_6ded5e30-894b-4718-b10e-6cdcf29ea854/kube-rbac-proxy/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.040300 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-rpswz_6ded5e30-894b-4718-b10e-6cdcf29ea854/manager/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.143346 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-tdw8l_2842a3ca-0708-4395-babd-b9dbdc1509d8/kube-rbac-proxy/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.169498 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-tdw8l_2842a3ca-0708-4395-babd-b9dbdc1509d8/manager/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.224745 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-5mktq_4ec2c5f6-679b-4f91-ab45-3eae7b12cd13/kube-rbac-proxy/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.385551 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-2phng_cc6a88c3-1e45-470c-ba3b-c15c83afbcec/kube-rbac-proxy/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.401321 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-5mktq_4ec2c5f6-679b-4f91-ab45-3eae7b12cd13/manager/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.405232 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-2phng_cc6a88c3-1e45-470c-ba3b-c15c83afbcec/manager/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.542840 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb446t9t22_c81cbe79-aa85-4707-a3d6-246bf422575b/manager/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.547866 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb446t9t22_c81cbe79-aa85-4707-a3d6-246bf422575b/kube-rbac-proxy/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.910759 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6ddddd9d6f-vz4mn_7c160373-5106-41ac-8e58-9ae48e82f5b5/operator/0.log" Dec 01 21:13:42 crc kubenswrapper[4852]: I1201 21:13:42.911227 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-kc7p9_3e4739d8-ba85-4187-9f0b-b51d0c81b8f5/registry-server/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.008143 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-ktgl2_e3a2d94e-61fb-406b-be5d-4ae5f0c18fda/kube-rbac-proxy/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.119572 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-ktgl2_e3a2d94e-61fb-406b-be5d-4ae5f0c18fda/manager/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.147869 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-bzkqb_8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca/kube-rbac-proxy/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.212077 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-bzkqb_8e323f2e-eab0-4e2f-9f8c-a6c892fd89ca/manager/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.336244 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-pzvgc_35e98bd5-b71c-4842-9511-52b5c9d8e25a/operator/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.455746 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-blr2g_268f049d-790e-4b1f-958d-0f07ba335215/kube-rbac-proxy/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.569931 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-blr2g_268f049d-790e-4b1f-958d-0f07ba335215/manager/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.621718 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-rqqkl_28cd4665-305c-4855-87c6-f267402d0b05/kube-rbac-proxy/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.652812 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-656fd97d56-qvhk9_980395ee-3c8d-41a7-9663-7bc33fb4cd46/manager/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.711646 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-rqqkl_28cd4665-305c-4855-87c6-f267402d0b05/manager/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.838404 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-hncmj_7240ba3b-5f4b-4c63-99cf-4fe68d720fb5/kube-rbac-proxy/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.869089 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-hncmj_7240ba3b-5f4b-4c63-99cf-4fe68d720fb5/manager/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.920089 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-lslxr_1ae195c5-0850-4ca5-85e4-abc7ac4d79dc/kube-rbac-proxy/0.log" Dec 01 21:13:43 crc kubenswrapper[4852]: I1201 21:13:43.970018 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-lslxr_1ae195c5-0850-4ca5-85e4-abc7ac4d79dc/manager/0.log" Dec 01 21:13:50 crc kubenswrapper[4852]: I1201 21:13:50.229358 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:13:50 crc kubenswrapper[4852]: I1201 21:13:50.229956 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:14:05 crc kubenswrapper[4852]: I1201 21:14:05.235811 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-v6j29_08729539-55f5-4d1c-a952-9af42aa77b9c/control-plane-machine-set-operator/0.log" Dec 01 21:14:05 crc kubenswrapper[4852]: I1201 21:14:05.448692 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-tb6ll_e476ff27-7c39-4627-b799-282107cac068/kube-rbac-proxy/0.log" Dec 01 21:14:05 crc kubenswrapper[4852]: I1201 21:14:05.476604 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-tb6ll_e476ff27-7c39-4627-b799-282107cac068/machine-api-operator/0.log" Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.041369 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-q996h_95f06176-4d0e-4c13-ae9b-1f0a4b7f6256/cert-manager-controller/0.log" Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.229392 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.229829 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.230036 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.231260 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"16152c8642f1b4c636ab5a6c74e7e424698b8e6dbec3d600336e8d1ba31f137f"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" 
Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.231554 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://16152c8642f1b4c636ab5a6c74e7e424698b8e6dbec3d600336e8d1ba31f137f" gracePeriod=600
Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.586323 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="16152c8642f1b4c636ab5a6c74e7e424698b8e6dbec3d600336e8d1ba31f137f" exitCode=0
Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.586379 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"16152c8642f1b4c636ab5a6c74e7e424698b8e6dbec3d600336e8d1ba31f137f"}
Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.586419 4852 scope.go:117] "RemoveContainer" containerID="f592b91808607f51b78f828572388805cc47bbb2f8ecb28739ac487025ceb613"
Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.796603 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-mf8nb_6686a8a5-9086-46eb-a481-5ed17b0e1318/cert-manager-cainjector/0.log"
Dec 01 21:14:20 crc kubenswrapper[4852]: I1201 21:14:20.942051 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-twxqd_3155f5b4-1371-40c2-be4a-f099a19001a9/cert-manager-webhook/0.log"
Dec 01 21:14:21 crc kubenswrapper[4852]: I1201 21:14:21.596632 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerStarted","Data":"7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1"}
Dec 01 21:14:35 crc kubenswrapper[4852]: I1201 21:14:35.700702 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-vf2l4_f5d7a90a-1755-450a-903a-016f63394e43/nmstate-console-plugin/0.log"
Dec 01 21:14:35 crc kubenswrapper[4852]: I1201 21:14:35.878323 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-dxw88_0d3319a8-2dcb-459f-9d3d-6f1eab59ae18/nmstate-handler/0.log"
Dec 01 21:14:35 crc kubenswrapper[4852]: I1201 21:14:35.924541 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qdrrg_fe79317d-951e-446a-9ba2-0d272c5bd48c/kube-rbac-proxy/0.log"
Dec 01 21:14:35 crc kubenswrapper[4852]: I1201 21:14:35.980070 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qdrrg_fe79317d-951e-446a-9ba2-0d272c5bd48c/nmstate-metrics/0.log"
Dec 01 21:14:36 crc kubenswrapper[4852]: I1201 21:14:36.082278 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-jqpds_ce552c22-e8f7-4f0d-a5a3-055dd64a6123/nmstate-operator/0.log"
Dec 01 21:14:36 crc kubenswrapper[4852]: I1201 21:14:36.192303 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-sk6kv_4816cba9-dcc7-48c9-b4b6-a41513a2611b/nmstate-webhook/0.log"
Dec 01 21:14:51 crc kubenswrapper[4852]: I1201 21:14:51.817012 4852 log.go:25] "Finished parsing log file"
path="/var/log/pods/metallb-system_controller-f8648f98b-62hbm_29e50547-faa6-4d14-adee-5ea9e0264a42/kube-rbac-proxy/0.log" Dec 01 21:14:51 crc kubenswrapper[4852]: I1201 21:14:51.972791 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-62hbm_29e50547-faa6-4d14-adee-5ea9e0264a42/controller/0.log" Dec 01 21:14:52 crc kubenswrapper[4852]: I1201 21:14:52.039235 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-frr-files/0.log" Dec 01 21:14:53 crc kubenswrapper[4852]: I1201 21:14:53.298085 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-metrics/0.log" Dec 01 21:14:53 crc kubenswrapper[4852]: I1201 21:14:53.312123 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-reloader/0.log" Dec 01 21:14:53 crc kubenswrapper[4852]: I1201 21:14:53.346487 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-reloader/0.log" Dec 01 21:14:53 crc kubenswrapper[4852]: I1201 21:14:53.377203 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-frr-files/0.log" Dec 01 21:14:53 crc kubenswrapper[4852]: I1201 21:14:53.973982 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-reloader/0.log" Dec 01 21:14:53 crc kubenswrapper[4852]: I1201 21:14:53.982910 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-metrics/0.log" Dec 01 21:14:53 crc kubenswrapper[4852]: I1201 21:14:53.992659 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-frr-files/0.log" Dec 01 21:14:53 crc kubenswrapper[4852]: I1201 21:14:53.997901 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-metrics/0.log" Dec 01 21:14:54 crc kubenswrapper[4852]: I1201 21:14:54.190355 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-frr-files/0.log" Dec 01 21:14:54 crc kubenswrapper[4852]: I1201 21:14:54.234383 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-reloader/0.log" Dec 01 21:14:54 crc kubenswrapper[4852]: I1201 21:14:54.234979 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/cp-metrics/0.log" Dec 01 21:14:54 crc kubenswrapper[4852]: I1201 21:14:54.269197 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/controller/0.log" Dec 01 21:14:54 crc kubenswrapper[4852]: I1201 21:14:54.408515 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/kube-rbac-proxy/0.log" Dec 01 21:14:54 crc kubenswrapper[4852]: I1201 21:14:54.436339 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/frr-metrics/0.log" Dec 01 21:14:54 crc 
kubenswrapper[4852]: I1201 21:14:54.457945 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/kube-rbac-proxy-frr/0.log" Dec 01 21:14:54 crc kubenswrapper[4852]: I1201 21:14:54.576322 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/reloader/0.log" Dec 01 21:14:54 crc kubenswrapper[4852]: I1201 21:14:54.659049 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-vwpnc_7b0c5529-861a-4fa1-82f7-72c2463171ee/frr-k8s-webhook-server/0.log" Dec 01 21:14:54 crc kubenswrapper[4852]: I1201 21:14:54.921509 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6ddd45494c-nkr5p_573029ff-5b2f-408d-aa44-da5d6ab202c0/manager/0.log" Dec 01 21:14:55 crc kubenswrapper[4852]: I1201 21:14:55.028713 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-58df767f78-7ndmk_13a3d5e7-f415-49e6-ac9b-6b9a3ab2027e/webhook-server/0.log" Dec 01 21:14:55 crc kubenswrapper[4852]: I1201 21:14:55.171589 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bs8kz_487029c0-a6d2-4f9a-a9d1-d819b22d1279/kube-rbac-proxy/0.log" Dec 01 21:14:55 crc kubenswrapper[4852]: I1201 21:14:55.745678 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bs8kz_487029c0-a6d2-4f9a-a9d1-d819b22d1279/speaker/0.log" Dec 01 21:14:55 crc kubenswrapper[4852]: I1201 21:14:55.903468 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dhxv4_8f00dfd9-29e9-420b-925f-8e875da31ee8/frr/0.log" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.196392 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k"] Dec 01 21:15:00 crc kubenswrapper[4852]: E1201 21:15:00.197767 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f42ca025-92ab-44d1-8222-842a440c4a53" containerName="extract-utilities" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.197792 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f42ca025-92ab-44d1-8222-842a440c4a53" containerName="extract-utilities" Dec 01 21:15:00 crc kubenswrapper[4852]: E1201 21:15:00.197822 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f42ca025-92ab-44d1-8222-842a440c4a53" containerName="extract-content" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.197834 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f42ca025-92ab-44d1-8222-842a440c4a53" containerName="extract-content" Dec 01 21:15:00 crc kubenswrapper[4852]: E1201 21:15:00.197852 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f42ca025-92ab-44d1-8222-842a440c4a53" containerName="registry-server" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.197866 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="f42ca025-92ab-44d1-8222-842a440c4a53" containerName="registry-server" Dec 01 21:15:00 crc kubenswrapper[4852]: E1201 21:15:00.197909 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c18d59ad-d265-4cad-9021-c6ce15ab216f" containerName="container-00" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.197923 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="c18d59ad-d265-4cad-9021-c6ce15ab216f" 
containerName="container-00" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.198295 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="c18d59ad-d265-4cad-9021-c6ce15ab216f" containerName="container-00" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.198353 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="f42ca025-92ab-44d1-8222-842a440c4a53" containerName="registry-server" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.199622 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.202722 4852 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.202926 4852 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.205761 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k"] Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.308618 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4697db86-e324-4aa9-af8c-17758395bad8-config-volume\") pod \"collect-profiles-29410395-44p8k\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.308756 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2x95\" (UniqueName: \"kubernetes.io/projected/4697db86-e324-4aa9-af8c-17758395bad8-kube-api-access-d2x95\") pod \"collect-profiles-29410395-44p8k\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.308842 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4697db86-e324-4aa9-af8c-17758395bad8-secret-volume\") pod \"collect-profiles-29410395-44p8k\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.410977 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4697db86-e324-4aa9-af8c-17758395bad8-config-volume\") pod \"collect-profiles-29410395-44p8k\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.411098 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2x95\" (UniqueName: \"kubernetes.io/projected/4697db86-e324-4aa9-af8c-17758395bad8-kube-api-access-d2x95\") pod \"collect-profiles-29410395-44p8k\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.411167 4852 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4697db86-e324-4aa9-af8c-17758395bad8-secret-volume\") pod \"collect-profiles-29410395-44p8k\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.412180 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4697db86-e324-4aa9-af8c-17758395bad8-config-volume\") pod \"collect-profiles-29410395-44p8k\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.417716 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4697db86-e324-4aa9-af8c-17758395bad8-secret-volume\") pod \"collect-profiles-29410395-44p8k\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.432374 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2x95\" (UniqueName: \"kubernetes.io/projected/4697db86-e324-4aa9-af8c-17758395bad8-kube-api-access-d2x95\") pod \"collect-profiles-29410395-44p8k\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:00 crc kubenswrapper[4852]: I1201 21:15:00.540267 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:01 crc kubenswrapper[4852]: I1201 21:15:01.055877 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k"] Dec 01 21:15:01 crc kubenswrapper[4852]: I1201 21:15:01.986890 4852 generic.go:334] "Generic (PLEG): container finished" podID="4697db86-e324-4aa9-af8c-17758395bad8" containerID="39754253471ed40a771f65b664b87c508853d5d3466af2dc43231bea565f3e7c" exitCode=0 Dec 01 21:15:01 crc kubenswrapper[4852]: I1201 21:15:01.986971 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" event={"ID":"4697db86-e324-4aa9-af8c-17758395bad8","Type":"ContainerDied","Data":"39754253471ed40a771f65b664b87c508853d5d3466af2dc43231bea565f3e7c"} Dec 01 21:15:01 crc kubenswrapper[4852]: I1201 21:15:01.987287 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" event={"ID":"4697db86-e324-4aa9-af8c-17758395bad8","Type":"ContainerStarted","Data":"14321146df63deaf1a120dd5e679126fe16dd9e41fe2d52f6469b4c7bf85134f"} Dec 01 21:15:03 crc kubenswrapper[4852]: I1201 21:15:03.424930 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:03 crc kubenswrapper[4852]: I1201 21:15:03.476047 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4697db86-e324-4aa9-af8c-17758395bad8-config-volume\") pod \"4697db86-e324-4aa9-af8c-17758395bad8\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " Dec 01 21:15:03 crc kubenswrapper[4852]: I1201 21:15:03.476154 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2x95\" (UniqueName: \"kubernetes.io/projected/4697db86-e324-4aa9-af8c-17758395bad8-kube-api-access-d2x95\") pod \"4697db86-e324-4aa9-af8c-17758395bad8\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " Dec 01 21:15:03 crc kubenswrapper[4852]: I1201 21:15:03.476341 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4697db86-e324-4aa9-af8c-17758395bad8-secret-volume\") pod \"4697db86-e324-4aa9-af8c-17758395bad8\" (UID: \"4697db86-e324-4aa9-af8c-17758395bad8\") " Dec 01 21:15:03 crc kubenswrapper[4852]: I1201 21:15:03.478276 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4697db86-e324-4aa9-af8c-17758395bad8-config-volume" (OuterVolumeSpecName: "config-volume") pod "4697db86-e324-4aa9-af8c-17758395bad8" (UID: "4697db86-e324-4aa9-af8c-17758395bad8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 21:15:03 crc kubenswrapper[4852]: I1201 21:15:03.484744 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4697db86-e324-4aa9-af8c-17758395bad8-kube-api-access-d2x95" (OuterVolumeSpecName: "kube-api-access-d2x95") pod "4697db86-e324-4aa9-af8c-17758395bad8" (UID: "4697db86-e324-4aa9-af8c-17758395bad8"). InnerVolumeSpecName "kube-api-access-d2x95". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:15:03 crc kubenswrapper[4852]: I1201 21:15:03.487333 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4697db86-e324-4aa9-af8c-17758395bad8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4697db86-e324-4aa9-af8c-17758395bad8" (UID: "4697db86-e324-4aa9-af8c-17758395bad8"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 21:15:03 crc kubenswrapper[4852]: I1201 21:15:03.579702 4852 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4697db86-e324-4aa9-af8c-17758395bad8-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 21:15:03 crc kubenswrapper[4852]: I1201 21:15:03.579740 4852 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4697db86-e324-4aa9-af8c-17758395bad8-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 21:15:03 crc kubenswrapper[4852]: I1201 21:15:03.579773 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2x95\" (UniqueName: \"kubernetes.io/projected/4697db86-e324-4aa9-af8c-17758395bad8-kube-api-access-d2x95\") on node \"crc\" DevicePath \"\"" Dec 01 21:15:04 crc kubenswrapper[4852]: I1201 21:15:04.011664 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" event={"ID":"4697db86-e324-4aa9-af8c-17758395bad8","Type":"ContainerDied","Data":"14321146df63deaf1a120dd5e679126fe16dd9e41fe2d52f6469b4c7bf85134f"} Dec 01 21:15:04 crc kubenswrapper[4852]: I1201 21:15:04.011717 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14321146df63deaf1a120dd5e679126fe16dd9e41fe2d52f6469b4c7bf85134f" Dec 01 21:15:04 crc kubenswrapper[4852]: I1201 21:15:04.011840 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410395-44p8k" Dec 01 21:15:04 crc kubenswrapper[4852]: I1201 21:15:04.512595 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q"] Dec 01 21:15:04 crc kubenswrapper[4852]: I1201 21:15:04.521737 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410350-6666q"] Dec 01 21:15:06 crc kubenswrapper[4852]: I1201 21:15:06.335104 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbbe5ad8-65e3-4453-b3a2-335195a08269" path="/var/lib/kubelet/pods/dbbe5ad8-65e3-4453-b3a2-335195a08269/volumes" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.232168 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/util/0.log" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.412638 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/pull/0.log" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.438604 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/util/0.log" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.489089 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/pull/0.log" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.592582 4852 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/util/0.log" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.595052 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/pull/0.log" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.637356 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fb4rkh_2c6c4086-9a8b-4563-907e-f1fd309d9cbd/extract/0.log" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.739574 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/util/0.log" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.932414 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/util/0.log" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.945967 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/pull/0.log" Dec 01 21:15:10 crc kubenswrapper[4852]: I1201 21:15:10.947741 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/pull/0.log" Dec 01 21:15:11 crc kubenswrapper[4852]: I1201 21:15:11.113306 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/util/0.log" Dec 01 21:15:11 crc kubenswrapper[4852]: I1201 21:15:11.117807 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/pull/0.log" Dec 01 21:15:11 crc kubenswrapper[4852]: I1201 21:15:11.134847 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pjjrm_26da957e-6f2b-4801-9186-d46cb87b1cc7/extract/0.log" Dec 01 21:15:11 crc kubenswrapper[4852]: I1201 21:15:11.283154 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-utilities/0.log" Dec 01 21:15:11 crc kubenswrapper[4852]: I1201 21:15:11.502113 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-utilities/0.log" Dec 01 21:15:11 crc kubenswrapper[4852]: I1201 21:15:11.504553 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-content/0.log" Dec 01 21:15:11 crc kubenswrapper[4852]: I1201 21:15:11.538135 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-content/0.log" Dec 01 21:15:11 crc kubenswrapper[4852]: I1201 21:15:11.699525 4852 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-utilities/0.log" Dec 01 21:15:11 crc kubenswrapper[4852]: I1201 21:15:11.701028 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/extract-content/0.log" Dec 01 21:15:11 crc kubenswrapper[4852]: I1201 21:15:11.914035 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-utilities/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.091426 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-content/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.113477 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-content/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.122137 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-utilities/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.210149 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-sqdrn_f798cf97-cdb1-43d0-b586-8cfa4fbb71ed/registry-server/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.319979 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-utilities/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.387188 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/extract-content/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.533051 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/3.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.635091 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vbxrx_674a3020-de73-41ff-b140-3ab2bc9d11aa/marketplace-operator/2.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.782147 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-utilities/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.921649 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-h6p7j_bdfa42a5-ee04-4343-a772-bacb19117993/registry-server/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.962697 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-content/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.962854 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-content/0.log" Dec 01 21:15:12 crc kubenswrapper[4852]: I1201 21:15:12.967704 4852 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-utilities/0.log" Dec 01 21:15:13 crc kubenswrapper[4852]: I1201 21:15:13.138886 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-utilities/0.log" Dec 01 21:15:13 crc kubenswrapper[4852]: I1201 21:15:13.143107 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/extract-content/0.log" Dec 01 21:15:13 crc kubenswrapper[4852]: I1201 21:15:13.283145 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xcpbh_2fb8eb65-b853-4f81-9650-abde242b8300/registry-server/0.log" Dec 01 21:15:13 crc kubenswrapper[4852]: I1201 21:15:13.350176 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-utilities/0.log" Dec 01 21:15:13 crc kubenswrapper[4852]: I1201 21:15:13.485810 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-utilities/0.log" Dec 01 21:15:13 crc kubenswrapper[4852]: I1201 21:15:13.521386 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-content/0.log" Dec 01 21:15:13 crc kubenswrapper[4852]: I1201 21:15:13.539771 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-content/0.log" Dec 01 21:15:13 crc kubenswrapper[4852]: I1201 21:15:13.732030 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-utilities/0.log" Dec 01 21:15:13 crc kubenswrapper[4852]: I1201 21:15:13.732548 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/extract-content/0.log" Dec 01 21:15:14 crc kubenswrapper[4852]: I1201 21:15:14.065333 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6vzc5_6e5d579f-42bd-43e4-92b0-fba046d9f9a3/registry-server/0.log" Dec 01 21:15:58 crc kubenswrapper[4852]: I1201 21:15:58.687387 4852 scope.go:117] "RemoveContainer" containerID="62dc6f8439dcc4dc8a25bbeff1400744884e1a7bc33297e8ee9ca9a0d63cea0d" Dec 01 21:16:50 crc kubenswrapper[4852]: I1201 21:16:50.230344 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:16:50 crc kubenswrapper[4852]: I1201 21:16:50.231124 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:16:52 crc kubenswrapper[4852]: I1201 21:16:52.433245 4852 generic.go:334] "Generic (PLEG): container finished" 
podID="b41cb2be-dc82-4acc-a3fa-e0110c3934e1" containerID="aedb19257f37274aa00b07832c465df5437883a1128edc93212e59a6b68ba127" exitCode=0 Dec 01 21:16:52 crc kubenswrapper[4852]: I1201 21:16:52.433370 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85nxj/must-gather-bmzf4" event={"ID":"b41cb2be-dc82-4acc-a3fa-e0110c3934e1","Type":"ContainerDied","Data":"aedb19257f37274aa00b07832c465df5437883a1128edc93212e59a6b68ba127"} Dec 01 21:16:52 crc kubenswrapper[4852]: I1201 21:16:52.434934 4852 scope.go:117] "RemoveContainer" containerID="aedb19257f37274aa00b07832c465df5437883a1128edc93212e59a6b68ba127" Dec 01 21:16:53 crc kubenswrapper[4852]: I1201 21:16:53.381635 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-85nxj_must-gather-bmzf4_b41cb2be-dc82-4acc-a3fa-e0110c3934e1/gather/0.log" Dec 01 21:17:03 crc kubenswrapper[4852]: I1201 21:17:03.984390 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-85nxj/must-gather-bmzf4"] Dec 01 21:17:03 crc kubenswrapper[4852]: I1201 21:17:03.986081 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-85nxj/must-gather-bmzf4" podUID="b41cb2be-dc82-4acc-a3fa-e0110c3934e1" containerName="copy" containerID="cri-o://b82de4c811b7e344cc555a464757b68d18bfb4582810f575a2a881449af77d2c" gracePeriod=2 Dec 01 21:17:04 crc kubenswrapper[4852]: I1201 21:17:04.001639 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-85nxj/must-gather-bmzf4"] Dec 01 21:17:04 crc kubenswrapper[4852]: I1201 21:17:04.597108 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-85nxj_must-gather-bmzf4_b41cb2be-dc82-4acc-a3fa-e0110c3934e1/copy/0.log" Dec 01 21:17:04 crc kubenswrapper[4852]: I1201 21:17:04.603962 4852 generic.go:334] "Generic (PLEG): container finished" podID="b41cb2be-dc82-4acc-a3fa-e0110c3934e1" containerID="b82de4c811b7e344cc555a464757b68d18bfb4582810f575a2a881449af77d2c" exitCode=143 Dec 01 21:17:04 crc kubenswrapper[4852]: I1201 21:17:04.867940 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-85nxj_must-gather-bmzf4_b41cb2be-dc82-4acc-a3fa-e0110c3934e1/copy/0.log" Dec 01 21:17:04 crc kubenswrapper[4852]: I1201 21:17:04.868369 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85nxj/must-gather-bmzf4" Dec 01 21:17:05 crc kubenswrapper[4852]: I1201 21:17:05.032574 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzq6j\" (UniqueName: \"kubernetes.io/projected/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-kube-api-access-dzq6j\") pod \"b41cb2be-dc82-4acc-a3fa-e0110c3934e1\" (UID: \"b41cb2be-dc82-4acc-a3fa-e0110c3934e1\") " Dec 01 21:17:05 crc kubenswrapper[4852]: I1201 21:17:05.032680 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-must-gather-output\") pod \"b41cb2be-dc82-4acc-a3fa-e0110c3934e1\" (UID: \"b41cb2be-dc82-4acc-a3fa-e0110c3934e1\") " Dec 01 21:17:05 crc kubenswrapper[4852]: I1201 21:17:05.041588 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-kube-api-access-dzq6j" (OuterVolumeSpecName: "kube-api-access-dzq6j") pod "b41cb2be-dc82-4acc-a3fa-e0110c3934e1" (UID: "b41cb2be-dc82-4acc-a3fa-e0110c3934e1"). 
InnerVolumeSpecName "kube-api-access-dzq6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:17:05 crc kubenswrapper[4852]: I1201 21:17:05.135801 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzq6j\" (UniqueName: \"kubernetes.io/projected/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-kube-api-access-dzq6j\") on node \"crc\" DevicePath \"\"" Dec 01 21:17:05 crc kubenswrapper[4852]: I1201 21:17:05.179067 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "b41cb2be-dc82-4acc-a3fa-e0110c3934e1" (UID: "b41cb2be-dc82-4acc-a3fa-e0110c3934e1"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:17:05 crc kubenswrapper[4852]: I1201 21:17:05.238258 4852 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b41cb2be-dc82-4acc-a3fa-e0110c3934e1-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 01 21:17:05 crc kubenswrapper[4852]: I1201 21:17:05.621574 4852 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-85nxj_must-gather-bmzf4_b41cb2be-dc82-4acc-a3fa-e0110c3934e1/copy/0.log" Dec 01 21:17:05 crc kubenswrapper[4852]: I1201 21:17:05.622771 4852 scope.go:117] "RemoveContainer" containerID="b82de4c811b7e344cc555a464757b68d18bfb4582810f575a2a881449af77d2c" Dec 01 21:17:05 crc kubenswrapper[4852]: I1201 21:17:05.623010 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85nxj/must-gather-bmzf4" Dec 01 21:17:05 crc kubenswrapper[4852]: I1201 21:17:05.656915 4852 scope.go:117] "RemoveContainer" containerID="aedb19257f37274aa00b07832c465df5437883a1128edc93212e59a6b68ba127" Dec 01 21:17:06 crc kubenswrapper[4852]: I1201 21:17:06.335570 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b41cb2be-dc82-4acc-a3fa-e0110c3934e1" path="/var/lib/kubelet/pods/b41cb2be-dc82-4acc-a3fa-e0110c3934e1/volumes" Dec 01 21:17:20 crc kubenswrapper[4852]: I1201 21:17:20.229597 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:17:20 crc kubenswrapper[4852]: I1201 21:17:20.230505 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:17:50 crc kubenswrapper[4852]: I1201 21:17:50.230343 4852 patch_prober.go:28] interesting pod/machine-config-daemon-j25pb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 21:17:50 crc kubenswrapper[4852]: I1201 21:17:50.230935 4852 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 21:17:50 crc kubenswrapper[4852]: I1201 21:17:50.230994 4852 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" Dec 01 21:17:50 crc kubenswrapper[4852]: I1201 21:17:50.232210 4852 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1"} pod="openshift-machine-config-operator/machine-config-daemon-j25pb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 21:17:50 crc kubenswrapper[4852]: I1201 21:17:50.232351 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" containerName="machine-config-daemon" containerID="cri-o://7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" gracePeriod=600 Dec 01 21:17:50 crc kubenswrapper[4852]: E1201 21:17:50.363602 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:17:51 crc kubenswrapper[4852]: I1201 21:17:51.135192 4852 generic.go:334] "Generic (PLEG): container finished" podID="e823f9e3-954c-4254-9f06-893905a28152" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" exitCode=0 Dec 01 21:17:51 crc kubenswrapper[4852]: I1201 21:17:51.135260 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" event={"ID":"e823f9e3-954c-4254-9f06-893905a28152","Type":"ContainerDied","Data":"7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1"} Dec 01 21:17:51 crc kubenswrapper[4852]: I1201 21:17:51.135334 4852 scope.go:117] "RemoveContainer" containerID="16152c8642f1b4c636ab5a6c74e7e424698b8e6dbec3d600336e8d1ba31f137f" Dec 01 21:17:51 crc kubenswrapper[4852]: I1201 21:17:51.136550 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:17:51 crc kubenswrapper[4852]: E1201 21:17:51.137258 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:18:03 crc kubenswrapper[4852]: I1201 21:18:03.319873 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:18:03 crc kubenswrapper[4852]: E1201 21:18:03.322000 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:18:18 crc kubenswrapper[4852]: I1201 21:18:18.319977 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:18:18 crc kubenswrapper[4852]: E1201 21:18:18.320778 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:18:29 crc kubenswrapper[4852]: I1201 21:18:29.319791 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:18:29 crc kubenswrapper[4852]: E1201 21:18:29.320651 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:18:44 crc kubenswrapper[4852]: I1201 21:18:44.320174 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:18:44 crc kubenswrapper[4852]: E1201 21:18:44.321008 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:18:57 crc kubenswrapper[4852]: I1201 21:18:57.320711 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:18:57 crc kubenswrapper[4852]: E1201 21:18:57.321903 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:18:58 crc kubenswrapper[4852]: I1201 21:18:58.866138 4852 scope.go:117] "RemoveContainer" containerID="e5339f27ca2994002cb46cd55fe59c0566da8f82801fae66521586d47388dd48" Dec 01 21:18:58 crc kubenswrapper[4852]: I1201 21:18:58.903653 4852 scope.go:117] "RemoveContainer" containerID="dcc333dc142b71c92454b081f4018f206b27fa94df84ba60f8f1639b66fe8873" Dec 01 21:18:58 crc kubenswrapper[4852]: I1201 21:18:58.954877 4852 scope.go:117] "RemoveContainer" containerID="a504f987f5d7d0e6fabcdc40b3d6b6e258f96a0132f647f21902ccf9d2733302" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.792648 4852 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-marketplace/redhat-operators-9dzgc"] Dec 01 21:19:02 crc kubenswrapper[4852]: E1201 21:19:02.793902 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b41cb2be-dc82-4acc-a3fa-e0110c3934e1" containerName="copy" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.793925 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b41cb2be-dc82-4acc-a3fa-e0110c3934e1" containerName="copy" Dec 01 21:19:02 crc kubenswrapper[4852]: E1201 21:19:02.793984 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b41cb2be-dc82-4acc-a3fa-e0110c3934e1" containerName="gather" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.793996 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="b41cb2be-dc82-4acc-a3fa-e0110c3934e1" containerName="gather" Dec 01 21:19:02 crc kubenswrapper[4852]: E1201 21:19:02.794018 4852 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4697db86-e324-4aa9-af8c-17758395bad8" containerName="collect-profiles" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.794035 4852 state_mem.go:107] "Deleted CPUSet assignment" podUID="4697db86-e324-4aa9-af8c-17758395bad8" containerName="collect-profiles" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.794376 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="b41cb2be-dc82-4acc-a3fa-e0110c3934e1" containerName="copy" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.794423 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="b41cb2be-dc82-4acc-a3fa-e0110c3934e1" containerName="gather" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.794481 4852 memory_manager.go:354] "RemoveStaleState removing state" podUID="4697db86-e324-4aa9-af8c-17758395bad8" containerName="collect-profiles" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.797729 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.831639 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9dzgc"] Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.900332 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-utilities\") pod \"redhat-operators-9dzgc\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.900585 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxrfp\" (UniqueName: \"kubernetes.io/projected/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-kube-api-access-xxrfp\") pod \"redhat-operators-9dzgc\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:02 crc kubenswrapper[4852]: I1201 21:19:02.900680 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-catalog-content\") pod \"redhat-operators-9dzgc\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:03 crc kubenswrapper[4852]: I1201 21:19:03.002126 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxrfp\" (UniqueName: \"kubernetes.io/projected/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-kube-api-access-xxrfp\") pod \"redhat-operators-9dzgc\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:03 crc kubenswrapper[4852]: I1201 21:19:03.002517 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-catalog-content\") pod \"redhat-operators-9dzgc\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:03 crc kubenswrapper[4852]: I1201 21:19:03.002689 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-utilities\") pod \"redhat-operators-9dzgc\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:03 crc kubenswrapper[4852]: I1201 21:19:03.003423 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-catalog-content\") pod \"redhat-operators-9dzgc\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:03 crc kubenswrapper[4852]: I1201 21:19:03.003434 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-utilities\") pod \"redhat-operators-9dzgc\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:03 crc kubenswrapper[4852]: I1201 21:19:03.021955 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-xxrfp\" (UniqueName: \"kubernetes.io/projected/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-kube-api-access-xxrfp\") pod \"redhat-operators-9dzgc\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:03 crc kubenswrapper[4852]: I1201 21:19:03.138191 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:03 crc kubenswrapper[4852]: I1201 21:19:03.646644 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9dzgc"] Dec 01 21:19:04 crc kubenswrapper[4852]: I1201 21:19:04.005296 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dzgc" event={"ID":"90fb04e0-a9ea-4fe4-befa-964836dc7ca1","Type":"ContainerStarted","Data":"7b43ac9d34e1321753bc2bc17418bdb90754aa23b97100bb338ee387c262a0c1"} Dec 01 21:19:05 crc kubenswrapper[4852]: I1201 21:19:05.019645 4852 generic.go:334] "Generic (PLEG): container finished" podID="90fb04e0-a9ea-4fe4-befa-964836dc7ca1" containerID="7a0e86fa8c950bfde2c032a2b93ee4300dfe23c5280d299ced8e12c3c01fb5c9" exitCode=0 Dec 01 21:19:05 crc kubenswrapper[4852]: I1201 21:19:05.019723 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dzgc" event={"ID":"90fb04e0-a9ea-4fe4-befa-964836dc7ca1","Type":"ContainerDied","Data":"7a0e86fa8c950bfde2c032a2b93ee4300dfe23c5280d299ced8e12c3c01fb5c9"} Dec 01 21:19:05 crc kubenswrapper[4852]: I1201 21:19:05.022963 4852 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.046516 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dzgc" event={"ID":"90fb04e0-a9ea-4fe4-befa-964836dc7ca1","Type":"ContainerStarted","Data":"0ea69d4c33784560d6f9917740b0b21061fa37966b7530c70e621cd76cea39d3"} Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.177188 4852 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bn4t7"] Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.179308 4852 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.189834 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bn4t7"] Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.324037 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-utilities\") pod \"community-operators-bn4t7\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.324326 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-catalog-content\") pod \"community-operators-bn4t7\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.324391 4852 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7wwm\" (UniqueName: \"kubernetes.io/projected/197712b4-c784-4b37-a22a-6c2a0ad41a92-kube-api-access-j7wwm\") pod \"community-operators-bn4t7\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.425775 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-catalog-content\") pod \"community-operators-bn4t7\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.425825 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7wwm\" (UniqueName: \"kubernetes.io/projected/197712b4-c784-4b37-a22a-6c2a0ad41a92-kube-api-access-j7wwm\") pod \"community-operators-bn4t7\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.425942 4852 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-utilities\") pod \"community-operators-bn4t7\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.426281 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-catalog-content\") pod \"community-operators-bn4t7\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.426527 4852 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-utilities\") pod \"community-operators-bn4t7\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.447428 4852 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-j7wwm\" (UniqueName: \"kubernetes.io/projected/197712b4-c784-4b37-a22a-6c2a0ad41a92-kube-api-access-j7wwm\") pod \"community-operators-bn4t7\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:07 crc kubenswrapper[4852]: I1201 21:19:07.541585 4852 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:08 crc kubenswrapper[4852]: I1201 21:19:08.059976 4852 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bn4t7"] Dec 01 21:19:08 crc kubenswrapper[4852]: I1201 21:19:08.320666 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:19:08 crc kubenswrapper[4852]: E1201 21:19:08.321244 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:19:09 crc kubenswrapper[4852]: I1201 21:19:09.065625 4852 generic.go:334] "Generic (PLEG): container finished" podID="90fb04e0-a9ea-4fe4-befa-964836dc7ca1" containerID="0ea69d4c33784560d6f9917740b0b21061fa37966b7530c70e621cd76cea39d3" exitCode=0 Dec 01 21:19:09 crc kubenswrapper[4852]: I1201 21:19:09.065721 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dzgc" event={"ID":"90fb04e0-a9ea-4fe4-befa-964836dc7ca1","Type":"ContainerDied","Data":"0ea69d4c33784560d6f9917740b0b21061fa37966b7530c70e621cd76cea39d3"} Dec 01 21:19:09 crc kubenswrapper[4852]: I1201 21:19:09.067741 4852 generic.go:334] "Generic (PLEG): container finished" podID="197712b4-c784-4b37-a22a-6c2a0ad41a92" containerID="5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e" exitCode=0 Dec 01 21:19:09 crc kubenswrapper[4852]: I1201 21:19:09.067771 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn4t7" event={"ID":"197712b4-c784-4b37-a22a-6c2a0ad41a92","Type":"ContainerDied","Data":"5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e"} Dec 01 21:19:09 crc kubenswrapper[4852]: I1201 21:19:09.067798 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn4t7" event={"ID":"197712b4-c784-4b37-a22a-6c2a0ad41a92","Type":"ContainerStarted","Data":"bea72eeb97486952bf2d7ebf4767461b25033cdf91c4c6e4eb30f9da086994dd"} Dec 01 21:19:10 crc kubenswrapper[4852]: I1201 21:19:10.077902 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dzgc" event={"ID":"90fb04e0-a9ea-4fe4-befa-964836dc7ca1","Type":"ContainerStarted","Data":"ca5f4356027409200560729dc77231d9f8bc3b7b28c5bf91062a811310c41602"} Dec 01 21:19:10 crc kubenswrapper[4852]: I1201 21:19:10.079525 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn4t7" event={"ID":"197712b4-c784-4b37-a22a-6c2a0ad41a92","Type":"ContainerStarted","Data":"4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780"} Dec 01 21:19:10 crc kubenswrapper[4852]: I1201 21:19:10.096048 4852 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9dzgc" podStartSLOduration=3.56723553 podStartE2EDuration="8.096028529s" podCreationTimestamp="2025-12-01 21:19:02 +0000 UTC" firstStartedPulling="2025-12-01 21:19:05.022719673 +0000 UTC m=+4464.949801090" lastFinishedPulling="2025-12-01 21:19:09.551512662 +0000 UTC m=+4469.478594089" observedRunningTime="2025-12-01 21:19:10.093136438 +0000 UTC m=+4470.020217865" watchObservedRunningTime="2025-12-01 21:19:10.096028529 +0000 UTC m=+4470.023109946" Dec 01 21:19:11 crc kubenswrapper[4852]: I1201 21:19:11.090155 4852 generic.go:334] "Generic (PLEG): container finished" podID="197712b4-c784-4b37-a22a-6c2a0ad41a92" containerID="4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780" exitCode=0 Dec 01 21:19:11 crc kubenswrapper[4852]: I1201 21:19:11.090232 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn4t7" event={"ID":"197712b4-c784-4b37-a22a-6c2a0ad41a92","Type":"ContainerDied","Data":"4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780"} Dec 01 21:19:12 crc kubenswrapper[4852]: I1201 21:19:12.102960 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn4t7" event={"ID":"197712b4-c784-4b37-a22a-6c2a0ad41a92","Type":"ContainerStarted","Data":"4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255"} Dec 01 21:19:13 crc kubenswrapper[4852]: I1201 21:19:13.139267 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:13 crc kubenswrapper[4852]: I1201 21:19:13.139588 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:14 crc kubenswrapper[4852]: I1201 21:19:14.197594 4852 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9dzgc" podUID="90fb04e0-a9ea-4fe4-befa-964836dc7ca1" containerName="registry-server" probeResult="failure" output=< Dec 01 21:19:14 crc kubenswrapper[4852]: timeout: failed to connect service ":50051" within 1s Dec 01 21:19:14 crc kubenswrapper[4852]: > Dec 01 21:19:17 crc kubenswrapper[4852]: I1201 21:19:17.542041 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:17 crc kubenswrapper[4852]: I1201 21:19:17.542727 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:17 crc kubenswrapper[4852]: I1201 21:19:17.586726 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:17 crc kubenswrapper[4852]: I1201 21:19:17.607321 4852 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bn4t7" podStartSLOduration=8.130350086 podStartE2EDuration="10.607298555s" podCreationTimestamp="2025-12-01 21:19:07 +0000 UTC" firstStartedPulling="2025-12-01 21:19:09.069431131 +0000 UTC m=+4468.996512548" lastFinishedPulling="2025-12-01 21:19:11.5463796 +0000 UTC m=+4471.473461017" observedRunningTime="2025-12-01 21:19:12.129853803 +0000 UTC m=+4472.056935230" watchObservedRunningTime="2025-12-01 21:19:17.607298555 +0000 UTC m=+4477.534379962" Dec 01 21:19:18 crc kubenswrapper[4852]: I1201 21:19:18.226177 4852 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:18 crc kubenswrapper[4852]: I1201 21:19:18.286486 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bn4t7"] Dec 01 21:19:19 crc kubenswrapper[4852]: I1201 21:19:19.320654 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:19:19 crc kubenswrapper[4852]: E1201 21:19:19.321118 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:19:20 crc kubenswrapper[4852]: I1201 21:19:20.189896 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bn4t7" podUID="197712b4-c784-4b37-a22a-6c2a0ad41a92" containerName="registry-server" containerID="cri-o://4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255" gracePeriod=2 Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:20.663037 4852 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:20.718791 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-catalog-content\") pod \"197712b4-c784-4b37-a22a-6c2a0ad41a92\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:20.718851 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7wwm\" (UniqueName: \"kubernetes.io/projected/197712b4-c784-4b37-a22a-6c2a0ad41a92-kube-api-access-j7wwm\") pod \"197712b4-c784-4b37-a22a-6c2a0ad41a92\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:20.718877 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-utilities\") pod \"197712b4-c784-4b37-a22a-6c2a0ad41a92\" (UID: \"197712b4-c784-4b37-a22a-6c2a0ad41a92\") " Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:20.720596 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-utilities" (OuterVolumeSpecName: "utilities") pod "197712b4-c784-4b37-a22a-6c2a0ad41a92" (UID: "197712b4-c784-4b37-a22a-6c2a0ad41a92"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:20.726258 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/197712b4-c784-4b37-a22a-6c2a0ad41a92-kube-api-access-j7wwm" (OuterVolumeSpecName: "kube-api-access-j7wwm") pod "197712b4-c784-4b37-a22a-6c2a0ad41a92" (UID: "197712b4-c784-4b37-a22a-6c2a0ad41a92"). InnerVolumeSpecName "kube-api-access-j7wwm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:20.775029 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "197712b4-c784-4b37-a22a-6c2a0ad41a92" (UID: "197712b4-c784-4b37-a22a-6c2a0ad41a92"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:20.820418 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:20.820446 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7wwm\" (UniqueName: \"kubernetes.io/projected/197712b4-c784-4b37-a22a-6c2a0ad41a92-kube-api-access-j7wwm\") on node \"crc\" DevicePath \"\"" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:20.820475 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/197712b4-c784-4b37-a22a-6c2a0ad41a92-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.202859 4852 generic.go:334] "Generic (PLEG): container finished" podID="197712b4-c784-4b37-a22a-6c2a0ad41a92" containerID="4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255" exitCode=0 Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.202921 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn4t7" event={"ID":"197712b4-c784-4b37-a22a-6c2a0ad41a92","Type":"ContainerDied","Data":"4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255"} Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.202960 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn4t7" event={"ID":"197712b4-c784-4b37-a22a-6c2a0ad41a92","Type":"ContainerDied","Data":"bea72eeb97486952bf2d7ebf4767461b25033cdf91c4c6e4eb30f9da086994dd"} Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.202985 4852 scope.go:117] "RemoveContainer" containerID="4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.203166 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bn4t7" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.235784 4852 scope.go:117] "RemoveContainer" containerID="4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.271561 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bn4t7"] Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.284664 4852 scope.go:117] "RemoveContainer" containerID="5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.290370 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bn4t7"] Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.405515 4852 scope.go:117] "RemoveContainer" containerID="4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255" Dec 01 21:19:21 crc kubenswrapper[4852]: E1201 21:19:21.406058 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255\": container with ID starting with 4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255 not found: ID does not exist" containerID="4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.406111 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255"} err="failed to get container status \"4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255\": rpc error: code = NotFound desc = could not find container \"4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255\": container with ID starting with 4474787f21b7fba209c5ca1b28453bbccc58607da496465e5934597a9e8c1255 not found: ID does not exist" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.406146 4852 scope.go:117] "RemoveContainer" containerID="4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780" Dec 01 21:19:21 crc kubenswrapper[4852]: E1201 21:19:21.408184 4852 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780\": container with ID starting with 4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780 not found: ID does not exist" containerID="4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.408216 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780"} err="failed to get container status \"4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780\": rpc error: code = NotFound desc = could not find container \"4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780\": container with ID starting with 4fe2e69e647fce3102c0cd271b4e023fd5d0344b9fc7decd44a2ddb01498a780 not found: ID does not exist" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.408239 4852 scope.go:117] "RemoveContainer" containerID="5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e" Dec 01 21:19:21 crc kubenswrapper[4852]: E1201 21:19:21.408497 4852 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e\": container with ID starting with 5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e not found: ID does not exist" containerID="5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e" Dec 01 21:19:21 crc kubenswrapper[4852]: I1201 21:19:21.408525 4852 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e"} err="failed to get container status \"5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e\": rpc error: code = NotFound desc = could not find container \"5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e\": container with ID starting with 5d64b2a1adff37da5207c7a8b7be30235a8e5cba0ae3daf3a5fe6870c7c5f69e not found: ID does not exist" Dec 01 21:19:22 crc kubenswrapper[4852]: I1201 21:19:22.334924 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="197712b4-c784-4b37-a22a-6c2a0ad41a92" path="/var/lib/kubelet/pods/197712b4-c784-4b37-a22a-6c2a0ad41a92/volumes" Dec 01 21:19:23 crc kubenswrapper[4852]: I1201 21:19:23.231524 4852 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:23 crc kubenswrapper[4852]: I1201 21:19:23.332835 4852 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:24 crc kubenswrapper[4852]: I1201 21:19:24.231499 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9dzgc"] Dec 01 21:19:25 crc kubenswrapper[4852]: I1201 21:19:25.254942 4852 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9dzgc" podUID="90fb04e0-a9ea-4fe4-befa-964836dc7ca1" containerName="registry-server" containerID="cri-o://ca5f4356027409200560729dc77231d9f8bc3b7b28c5bf91062a811310c41602" gracePeriod=2 Dec 01 21:19:26 crc kubenswrapper[4852]: I1201 21:19:26.271253 4852 generic.go:334] "Generic (PLEG): container finished" podID="90fb04e0-a9ea-4fe4-befa-964836dc7ca1" containerID="ca5f4356027409200560729dc77231d9f8bc3b7b28c5bf91062a811310c41602" exitCode=0 Dec 01 21:19:26 crc kubenswrapper[4852]: I1201 21:19:26.271314 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dzgc" event={"ID":"90fb04e0-a9ea-4fe4-befa-964836dc7ca1","Type":"ContainerDied","Data":"ca5f4356027409200560729dc77231d9f8bc3b7b28c5bf91062a811310c41602"} Dec 01 21:19:26 crc kubenswrapper[4852]: I1201 21:19:26.272013 4852 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dzgc" event={"ID":"90fb04e0-a9ea-4fe4-befa-964836dc7ca1","Type":"ContainerDied","Data":"7b43ac9d34e1321753bc2bc17418bdb90754aa23b97100bb338ee387c262a0c1"} Dec 01 21:19:26 crc kubenswrapper[4852]: I1201 21:19:26.272048 4852 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b43ac9d34e1321753bc2bc17418bdb90754aa23b97100bb338ee387c262a0c1" Dec 01 21:19:26 crc kubenswrapper[4852]: I1201 21:19:26.862362 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:26 crc kubenswrapper[4852]: I1201 21:19:26.968492 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-utilities\") pod \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " Dec 01 21:19:26 crc kubenswrapper[4852]: I1201 21:19:26.968667 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxrfp\" (UniqueName: \"kubernetes.io/projected/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-kube-api-access-xxrfp\") pod \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " Dec 01 21:19:26 crc kubenswrapper[4852]: I1201 21:19:26.968718 4852 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-catalog-content\") pod \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\" (UID: \"90fb04e0-a9ea-4fe4-befa-964836dc7ca1\") " Dec 01 21:19:26 crc kubenswrapper[4852]: I1201 21:19:26.969306 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-utilities" (OuterVolumeSpecName: "utilities") pod "90fb04e0-a9ea-4fe4-befa-964836dc7ca1" (UID: "90fb04e0-a9ea-4fe4-befa-964836dc7ca1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:19:26 crc kubenswrapper[4852]: I1201 21:19:26.975524 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-kube-api-access-xxrfp" (OuterVolumeSpecName: "kube-api-access-xxrfp") pod "90fb04e0-a9ea-4fe4-befa-964836dc7ca1" (UID: "90fb04e0-a9ea-4fe4-befa-964836dc7ca1"). InnerVolumeSpecName "kube-api-access-xxrfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 21:19:27 crc kubenswrapper[4852]: I1201 21:19:27.071177 4852 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 21:19:27 crc kubenswrapper[4852]: I1201 21:19:27.071214 4852 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxrfp\" (UniqueName: \"kubernetes.io/projected/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-kube-api-access-xxrfp\") on node \"crc\" DevicePath \"\"" Dec 01 21:19:27 crc kubenswrapper[4852]: I1201 21:19:27.100221 4852 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "90fb04e0-a9ea-4fe4-befa-964836dc7ca1" (UID: "90fb04e0-a9ea-4fe4-befa-964836dc7ca1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 21:19:27 crc kubenswrapper[4852]: I1201 21:19:27.173592 4852 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90fb04e0-a9ea-4fe4-befa-964836dc7ca1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 21:19:27 crc kubenswrapper[4852]: I1201 21:19:27.283284 4852 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9dzgc" Dec 01 21:19:27 crc kubenswrapper[4852]: I1201 21:19:27.329743 4852 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9dzgc"] Dec 01 21:19:27 crc kubenswrapper[4852]: I1201 21:19:27.343134 4852 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9dzgc"] Dec 01 21:19:28 crc kubenswrapper[4852]: I1201 21:19:28.338159 4852 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90fb04e0-a9ea-4fe4-befa-964836dc7ca1" path="/var/lib/kubelet/pods/90fb04e0-a9ea-4fe4-befa-964836dc7ca1/volumes" Dec 01 21:19:33 crc kubenswrapper[4852]: I1201 21:19:33.319927 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:19:33 crc kubenswrapper[4852]: E1201 21:19:33.320751 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:19:47 crc kubenswrapper[4852]: I1201 21:19:47.320928 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:19:47 crc kubenswrapper[4852]: E1201 21:19:47.321581 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:19:58 crc kubenswrapper[4852]: I1201 21:19:58.331850 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:19:58 crc kubenswrapper[4852]: E1201 21:19:58.333259 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:20:12 crc kubenswrapper[4852]: I1201 21:20:12.321070 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:20:12 crc kubenswrapper[4852]: E1201 21:20:12.322210 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:20:23 crc kubenswrapper[4852]: I1201 21:20:23.320589 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:20:23 crc 
kubenswrapper[4852]: E1201 21:20:23.321734 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:20:35 crc kubenswrapper[4852]: I1201 21:20:35.319761 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:20:35 crc kubenswrapper[4852]: E1201 21:20:35.320741 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:20:49 crc kubenswrapper[4852]: I1201 21:20:49.320600 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:20:49 crc kubenswrapper[4852]: E1201 21:20:49.322019 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152" Dec 01 21:21:04 crc kubenswrapper[4852]: I1201 21:21:04.378555 4852 scope.go:117] "RemoveContainer" containerID="7eb56d53ea53bf62f3679a93b52e2327dd0499824584a41c7800550426d0afd1" Dec 01 21:21:04 crc kubenswrapper[4852]: E1201 21:21:04.379886 4852 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-j25pb_openshift-machine-config-operator(e823f9e3-954c-4254-9f06-893905a28152)\"" pod="openshift-machine-config-operator/machine-config-daemon-j25pb" podUID="e823f9e3-954c-4254-9f06-893905a28152"